/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
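// Illustrative note (added; not in the original sources): a plausible sizing rule under
// these constants is batch size = requested HFR fps / PREVIEW_FPS_FOR_HFR, e.g.
// 120 / 30 = 4 frames per batch for a hypothetical 120fps request, which stays within
// MAX_HFR_BATCH_SIZE (8). The authoritative batch-size computation lives in the stream
// configuration code later in this file.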
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
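// Usage sketch (illustrative): METADATA_MAP_SIZE yields the entry count of a statically
// sized table declared in this file, e.g.
//     size_t n = METADATA_MAP_SIZE(EFFECT_MODES_MAP); // number of {framework, HAL} pairs
// Note it is only valid for true arrays; applied to a pointer it would silently give a
// wrong count.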

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first match found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
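// Illustrative sketch (not part of the original code) of the first-match, HAL-to-framework
// traversal described in the comment above; it assumes the QCameraMap fields are named
// fwk_name/hal_name as used elsewhere in this HAL. Real lookups go through dedicated
// helper functions.
//     for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//         if (REFERENCE_ILLUMINANT_MAP[i].hal_name == halValue) {
//             fwkValue = REFERENCE_ILLUMINANT_MAP[i].fwk_name;
//             break; // first match wins, which is why table order matters
//         }
//     }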

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
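// The camera framework drives this HAL through the static entry points in the table
// above: it calls, for example, ops->process_capture_request(device, request), and each
// static wrapper recovers the QCamera3HardwareInterface instance from device->priv
// (set in the constructor below).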

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
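// Example call site (as used later in this file):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// The event is logged only when gEaselProfilingEnabled is set, timestamped with
// CLOCK_BOOTTIME in milliseconds.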

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - Check whether this hardcoding is still needed, e.g. by verifying that mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    //As per spec. depth cloud should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always the full active array size,
                     * but it is not clear from the spec whether the framework will
                     * always follow that. We also have logic to override to the full
                     * array size, so keep the logic lenient for the moment.
                     */
1272 }
1273 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1274 MAX_SIZES_CNT);
1275 for (size_t i = 0; i < count; i++) {
1276 if (((int32_t)rotatedWidth ==
1277 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1278 ((int32_t)rotatedHeight ==
1279 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1280 sizeFound = true;
1281 break;
1282 }
1283 }
1284 break;
1285 } /* End of switch(newStream->format) */
1286
1287 /* We error out even if a single stream has unsupported size set */
1288 if (!sizeFound) {
1289 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1290 rotatedWidth, rotatedHeight, newStream->format,
1291 gCamCapability[mCameraId]->active_array_size.width,
1292 gCamCapability[mCameraId]->active_array_size.height);
1293 rc = -EINVAL;
1294 break;
1295 }
1296 } /* End of for each stream */
1297 return rc;
1298}
1299
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001300/*===========================================================================
1301 * FUNCTION : validateUsageFlags
1302 *
1303 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1304 *
1305 * PARAMETERS :
1306 * @stream_list : streams to be configured
1307 *
1308 * RETURN :
1309 * NO_ERROR if the usage flags are supported
1310 * error code if usage flags are not supported
1311 *
1312 *==========================================================================*/
1313int QCamera3HardwareInterface::validateUsageFlags(
1314 const camera3_stream_configuration_t* streamList)
1315{
1316 for (size_t j = 0; j < streamList->num_streams; j++) {
1317 const camera3_stream_t *newStream = streamList->streams[j];
1318
1319 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1320 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1321 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1322 continue;
1323 }
1324
1325 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1326 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1327 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1328 bool forcePreviewUBWC = true;
1329 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1330 forcePreviewUBWC = false;
1331 }
1332 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1333 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1334 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1335 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1336 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1337 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1338
1339 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1340 // So color spaces will always match.
1341
1342 // Check whether underlying formats of shared streams match.
1343 if (isVideo && isPreview && videoFormat != previewFormat) {
1344 LOGE("Combined video and preview usage flag is not supported");
1345 return -EINVAL;
1346 }
1347 if (isPreview && isZSL && previewFormat != zslFormat) {
1348 LOGE("Combined preview and zsl usage flag is not supported");
1349 return -EINVAL;
1350 }
1351 if (isVideo && isZSL && videoFormat != zslFormat) {
1352 LOGE("Combined video and zsl usage flag is not supported");
1353 return -EINVAL;
1354 }
1355 }
1356 return NO_ERROR;
1357}
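/*
 * Illustrative sketch (not part of the original code; the gralloc flag names
 * below are assumptions, the actual mapping is done by the IS_USAGE_* macros):
 * a single output stream tagged for both preview and video consumers, e.g.
 *
 *   camera3_stream_t stream = {};
 *   stream.stream_type = CAMERA3_STREAM_OUTPUT;
 *   stream.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   stream.usage       = GRALLOC_USAGE_HW_VIDEO_ENCODER |  // video consumer
 *                        GRALLOC_USAGE_HW_TEXTURE;         // preview consumer
 *
 * is rejected by validateUsageFlags() with -EINVAL whenever the default
 * formats derived for CAM_STREAM_TYPE_VIDEO and CAM_STREAM_TYPE_PREVIEW
 * differ at that resolution (for instance when video UBWC is enabled but
 * preview is not forced to UBWC).
 */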
1358
1359/*===========================================================================
1360 * FUNCTION : validateUsageFlagsForEis
1361 *
1362 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1363 *
1364 * PARAMETERS :
1365 * @stream_list : streams to be configured
1366 *
1367 * RETURN :
1368 * NO_ERROR if the usage flags are supported
1369 * error code if usage flags are not supported
1370 *
1371 *==========================================================================*/
1372int QCamera3HardwareInterface::validateUsageFlagsForEis(
1373 const camera3_stream_configuration_t* streamList)
1374{
1375 for (size_t j = 0; j < streamList->num_streams; j++) {
1376 const camera3_stream_t *newStream = streamList->streams[j];
1377
1378 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1379 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1380
1381 // Because EIS is "hard-coded" for certain use cases, and the current
1382 // implementation doesn't support shared preview and video on the same
1383 // stream, return failure if EIS is forced on.
1384 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1385 LOGE("Combined video and preview usage flag is not supported due to EIS");
1386 return -EINVAL;
1387 }
1388 }
1389 return NO_ERROR;
1390}
1391
Thierry Strudel3d639192016-09-09 11:52:26 -07001392/*==============================================================================
1393 * FUNCTION : isSupportChannelNeeded
1394 *
1395 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1396 *
1397 * PARAMETERS :
1398 * @stream_list : streams to be configured
1399 * @stream_config_info : the config info for streams to be configured
1400 *
1401 * RETURN : Boolean true/false decision
1402 *
1403 *==========================================================================*/
1404bool QCamera3HardwareInterface::isSupportChannelNeeded(
1405 camera3_stream_configuration_t *streamList,
1406 cam_stream_size_info_t stream_config_info)
1407{
1408 uint32_t i;
1409 bool pprocRequested = false;
1410 /* Check for conditions where PProc pipeline does not have any streams*/
1411 for (i = 0; i < stream_config_info.num_streams; i++) {
1412 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1413 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1414 pprocRequested = true;
1415 break;
1416 }
1417 }
1418
1419 if (pprocRequested == false)
1420 return true;
1421
1422 /* Dummy stream needed if only raw or jpeg streams present */
1423 for (i = 0; i < streamList->num_streams; i++) {
1424 switch(streamList->streams[i]->format) {
1425 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1426 case HAL_PIXEL_FORMAT_RAW10:
1427 case HAL_PIXEL_FORMAT_RAW16:
1428 case HAL_PIXEL_FORMAT_BLOB:
1429 break;
1430 default:
1431 return false;
1432 }
1433 }
1434 return true;
1435}
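/*
 * Illustrative example (not part of the original code): a configuration that
 * contains only stalling/raw outputs, e.g.
 *
 *   streams[0]: HAL_PIXEL_FORMAT_BLOB   (JPEG snapshot)
 *   streams[1]: HAL_PIXEL_FORMAT_RAW16
 *
 * makes isSupportChannelNeeded() return true, so the caller brings up a dummy
 * support channel to keep the processing pipeline populated.
 */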
1436
1437/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001440 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001441 *
1442 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001443 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001444 *
1445 * RETURN : int32_t type of status
1446 * NO_ERROR -- success
1447 * non-zero failure code
1448 *
1449 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001450int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001451{
1452 int32_t rc = NO_ERROR;
1453
1454 cam_dimension_t max_dim = {0, 0};
1455 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1456 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1457 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1458 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1459 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1460 }
1461
1462 clear_metadata_buffer(mParameters);
1463
1464 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1465 max_dim);
1466 if (rc != NO_ERROR) {
1467 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1468 return rc;
1469 }
1470
1471 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1472 if (rc != NO_ERROR) {
1473 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1474 return rc;
1475 }
1476
1477 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001478 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001479
1480 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1481 mParameters);
1482 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 return rc;
1485 }
1486
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001488 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1489 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1490 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1491 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1492 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001493
1494 return rc;
1495}
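/*
 * Minimal caller sketch (illustration only, not part of the original code):
 *
 *   cam_sensor_mode_info_t sensorModeInfo;
 *   memset(&sensorModeInfo, 0, sizeof(sensorModeInfo));
 *   if (getSensorModeInfo(sensorModeInfo) == NO_ERROR) {
 *       // e.g. use sensorModeInfo.op_pixel_clk and pixel_array_size to
 *       // derive per-frame timing for the currently selected sensor mode.
 *   }
 */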
1496
1497/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001498 * FUNCTION : addToPPFeatureMask
1499 *
1500 * DESCRIPTION: add additional features to pp feature mask based on
1501 * stream type and use case
1502 *
1503 * PARAMETERS :
1504 * @stream_format : stream type for feature mask
1505 * @stream_idx : stream idx within postprocess_mask list to change
1506 *
1507 * RETURN : None
1508 *
1509 *==========================================================================*/
1510void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1511 uint32_t stream_idx)
1512{
1513 char feature_mask_value[PROPERTY_VALUE_MAX];
1514 cam_feature_mask_t feature_mask;
1515 int args_converted;
1516 int property_len;
1517
1518 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001519#ifdef _LE_CAMERA_
1520 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1521 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1522 property_len = property_get("persist.camera.hal3.feature",
1523 feature_mask_value, swtnr_feature_mask_value);
1524#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001525 property_len = property_get("persist.camera.hal3.feature",
1526 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001527#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001528 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1529 (feature_mask_value[1] == 'x')) {
1530 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1531 } else {
1532 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1533 }
1534 if (1 != args_converted) {
1535 feature_mask = 0;
1536 LOGE("Wrong feature mask %s", feature_mask_value);
1537 return;
1538 }
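    /* Illustrative note (assumption about typical usage, not part of the
     * original code): the override can be supplied either as hex or decimal,
     * e.g. via
     *   adb shell setprop persist.camera.hal3.feature 0x2000
     *   adb shell setprop persist.camera.hal3.feature 8192
     * A value that does not parse as a single integer is rejected above:
     * feature_mask is reset to 0 and no extra feature bits are added. */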
1539
1540 switch (stream_format) {
1541 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1542 /* Add LLVD to pp feature mask only if video hint is enabled */
1543 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1544 mStreamConfigInfo.postprocess_mask[stream_idx]
1545 |= CAM_QTI_FEATURE_SW_TNR;
1546 LOGH("Added SW TNR to pp feature mask");
1547 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1548 mStreamConfigInfo.postprocess_mask[stream_idx]
1549 |= CAM_QCOM_FEATURE_LLVD;
1550 LOGH("Added LLVD SeeMore to pp feature mask");
1551 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001552 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1553 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1554 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1555 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001556 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1557 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1558 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1559 CAM_QTI_FEATURE_BINNING_CORRECTION;
1560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001561 break;
1562 }
1563 default:
1564 break;
1565 }
1566 LOGD("PP feature mask %llx",
1567 mStreamConfigInfo.postprocess_mask[stream_idx]);
1568}
1569
1570/*==============================================================================
1571 * FUNCTION : updateFpsInPreviewBuffer
1572 *
1573 * DESCRIPTION: update FPS information in preview buffer.
1574 *
1575 * PARAMETERS :
1576 * @metadata : pointer to metadata buffer
1577 * @frame_number: frame_number to look for in pending buffer list
1578 *
1579 * RETURN : None
1580 *
1581 *==========================================================================*/
1582void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1583 uint32_t frame_number)
1584{
1585 // Mark all pending buffers for this particular request
1586 // with corresponding framerate information
1587 for (List<PendingBuffersInRequest>::iterator req =
1588 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1589 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1590 for(List<PendingBufferInfo>::iterator j =
1591 req->mPendingBufferList.begin();
1592 j != req->mPendingBufferList.end(); j++) {
1593 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1594 if ((req->frame_number == frame_number) &&
1595 (channel->getStreamTypeMask() &
1596 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1597 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1598 CAM_INTF_PARM_FPS_RANGE, metadata) {
1599 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1600 struct private_handle_t *priv_handle =
1601 (struct private_handle_t *)(*(j->buffer));
1602 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1603 }
1604 }
1605 }
1606 }
1607}
1608
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001609/*==============================================================================
1610 * FUNCTION : updateTimeStampInPendingBuffers
1611 *
1612 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1613 * of a frame number
1614 *
1615 * PARAMETERS :
1616 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1617 * @timestamp : timestamp to be set
1618 *
1619 * RETURN : None
1620 *
1621 *==========================================================================*/
1622void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1623 uint32_t frameNumber, nsecs_t timestamp)
1624{
1625 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1626 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1627 if (req->frame_number != frameNumber)
1628 continue;
1629
1630 for (auto k = req->mPendingBufferList.begin();
1631 k != req->mPendingBufferList.end(); k++ ) {
1632 struct private_handle_t *priv_handle =
1633 (struct private_handle_t *) (*(k->buffer));
1634 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1635 }
1636 }
1637 return;
1638}
1639
Thierry Strudel3d639192016-09-09 11:52:26 -07001640/*===========================================================================
1641 * FUNCTION : configureStreams
1642 *
1643 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1644 * and output streams.
1645 *
1646 * PARAMETERS :
1647 * @stream_list : streams to be configured
1648 *
1649 * RETURN :
1650 *
1651 *==========================================================================*/
1652int QCamera3HardwareInterface::configureStreams(
1653 camera3_stream_configuration_t *streamList)
1654{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001655 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001656 int rc = 0;
1657
1658 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001661 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001662
1663 return rc;
1664}
1665
1666/*===========================================================================
1667 * FUNCTION : configureStreamsPerfLocked
1668 *
1669 * DESCRIPTION: configureStreams while perfLock is held.
1670 *
1671 * PARAMETERS :
1672 * @stream_list : streams to be configured
1673 *
1674 * RETURN : int32_t type of status
1675 * NO_ERROR -- success
1676 * non-zero failure code
1677 *==========================================================================*/
1678int QCamera3HardwareInterface::configureStreamsPerfLocked(
1679 camera3_stream_configuration_t *streamList)
1680{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001681 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001682 int rc = 0;
1683
1684 // Sanity check stream_list
1685 if (streamList == NULL) {
1686 LOGE("NULL stream configuration");
1687 return BAD_VALUE;
1688 }
1689 if (streamList->streams == NULL) {
1690 LOGE("NULL stream list");
1691 return BAD_VALUE;
1692 }
1693
1694 if (streamList->num_streams < 1) {
1695 LOGE("Bad number of streams requested: %d",
1696 streamList->num_streams);
1697 return BAD_VALUE;
1698 }
1699
1700 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1701 LOGE("Maximum number of streams %d exceeded: %d",
1702 MAX_NUM_STREAMS, streamList->num_streams);
1703 return BAD_VALUE;
1704 }
1705
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001706 rc = validateUsageFlags(streamList);
1707 if (rc != NO_ERROR) {
1708 return rc;
1709 }
1710
Thierry Strudel3d639192016-09-09 11:52:26 -07001711 mOpMode = streamList->operation_mode;
1712 LOGD("mOpMode: %d", mOpMode);
1713
1714 /* first invalidate all the streams in the mStreamList
1715 * if they appear again, they will be validated */
1716 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1717 it != mStreamInfo.end(); it++) {
1718 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1719 if (channel) {
1720 channel->stop();
1721 }
1722 (*it)->status = INVALID;
1723 }
1724
1725 if (mRawDumpChannel) {
1726 mRawDumpChannel->stop();
1727 delete mRawDumpChannel;
1728 mRawDumpChannel = NULL;
1729 }
1730
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001731 if (mHdrPlusRawSrcChannel) {
1732 mHdrPlusRawSrcChannel->stop();
1733 delete mHdrPlusRawSrcChannel;
1734 mHdrPlusRawSrcChannel = NULL;
1735 }
1736
Thierry Strudel3d639192016-09-09 11:52:26 -07001737 if (mSupportChannel)
1738 mSupportChannel->stop();
1739
1740 if (mAnalysisChannel) {
1741 mAnalysisChannel->stop();
1742 }
1743 if (mMetadataChannel) {
1744 /* If mStreamInfo is not empty, there is a metadata stream */
1745 mMetadataChannel->stop();
1746 }
1747 if (mChannelHandle) {
1748 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1749 mChannelHandle);
1750 LOGD("stopping channel %d", mChannelHandle);
1751 }
1752
1753 pthread_mutex_lock(&mMutex);
1754
1755 // Check state
1756 switch (mState) {
1757 case INITIALIZED:
1758 case CONFIGURED:
1759 case STARTED:
1760 /* valid state */
1761 break;
1762 default:
1763 LOGE("Invalid state %d", mState);
1764 pthread_mutex_unlock(&mMutex);
1765 return -ENODEV;
1766 }
1767
1768 /* Check whether we have video stream */
1769 m_bIs4KVideo = false;
1770 m_bIsVideo = false;
1771 m_bEisSupportedSize = false;
1772 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001773 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001774 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001775 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001776 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001777 uint32_t videoWidth = 0U;
1778 uint32_t videoHeight = 0U;
1779 size_t rawStreamCnt = 0;
1780 size_t stallStreamCnt = 0;
1781 size_t processedStreamCnt = 0;
1782 // Number of streams on ISP encoder path
1783 size_t numStreamsOnEncoder = 0;
1784 size_t numYuv888OnEncoder = 0;
1785 bool bYuv888OverrideJpeg = false;
1786 cam_dimension_t largeYuv888Size = {0, 0};
1787 cam_dimension_t maxViewfinderSize = {0, 0};
1788 bool bJpegExceeds4K = false;
1789 bool bJpegOnEncoder = false;
1790 bool bUseCommonFeatureMask = false;
1791 cam_feature_mask_t commonFeatureMask = 0;
1792 bool bSmallJpegSize = false;
1793 uint32_t width_ratio;
1794 uint32_t height_ratio;
1795 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1796 camera3_stream_t *inputStream = NULL;
1797 bool isJpeg = false;
1798 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001799 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001800 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001801
1802 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1803
1804 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001805 uint8_t eis_prop_set;
1806 uint32_t maxEisWidth = 0;
1807 uint32_t maxEisHeight = 0;
1808
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001809 // Initialize all instant AEC related variables
1810 mInstantAEC = false;
1811 mResetInstantAEC = false;
1812 mInstantAECSettledFrameNumber = 0;
1813 mAecSkipDisplayFrameBound = 0;
1814 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001815 mCurrFeatureState = 0;
1816 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001817
Thierry Strudel3d639192016-09-09 11:52:26 -07001818 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1819
1820 size_t count = IS_TYPE_MAX;
1821 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1822 for (size_t i = 0; i < count; i++) {
1823 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001824 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1825 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001826 break;
1827 }
1828 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001829
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001830 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001831 maxEisWidth = MAX_EIS_WIDTH;
1832 maxEisHeight = MAX_EIS_HEIGHT;
1833 }
1834
1835 /* EIS setprop control */
1836 char eis_prop[PROPERTY_VALUE_MAX];
1837 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001838 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001839 eis_prop_set = (uint8_t)atoi(eis_prop);
1840
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001841 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1843
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001844 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1845 m_bEisEnable, eis_prop_set, m_bEisSupported);
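    /* Illustrative note (assumption, not part of the original code): EIS can
     * be forced off for debugging with
     *   adb shell setprop persist.camera.eis.enable 0
     * in which case m_bEisEnable stays false regardless of sensor support.
     * With the default value of "1", the decision is left to m_bEisSupported
     * and the operation-mode check above (and to the front-camera/no-video
     * override applied later in this function). */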
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001846
Thierry Strudel3d639192016-09-09 11:52:26 -07001847 /* stream configurations */
1848 for (size_t i = 0; i < streamList->num_streams; i++) {
1849 camera3_stream_t *newStream = streamList->streams[i];
1850 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1851 "height = %d, rotation = %d, usage = 0x%x",
1852 i, newStream->stream_type, newStream->format,
1853 newStream->width, newStream->height, newStream->rotation,
1854 newStream->usage);
1855 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1856 newStream->stream_type == CAMERA3_STREAM_INPUT){
1857 isZsl = true;
1858 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001859 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1860 IS_USAGE_PREVIEW(newStream->usage)) {
1861 isPreview = true;
1862 }
1863
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1865 inputStream = newStream;
1866 }
1867
Emilian Peev7650c122017-01-19 08:24:33 -08001868 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1869 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001870 isJpeg = true;
1871 jpegSize.width = newStream->width;
1872 jpegSize.height = newStream->height;
1873 if (newStream->width > VIDEO_4K_WIDTH ||
1874 newStream->height > VIDEO_4K_HEIGHT)
1875 bJpegExceeds4K = true;
1876 }
1877
1878 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1879 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1880 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001881 // In HAL3 we can have multiple different video streams.
1882 // The variables video width and height are used below as
1883 // dimensions of the biggest of them
1884 if (videoWidth < newStream->width ||
1885 videoHeight < newStream->height) {
1886 videoWidth = newStream->width;
1887 videoHeight = newStream->height;
1888 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1890 (VIDEO_4K_HEIGHT <= newStream->height)) {
1891 m_bIs4KVideo = true;
1892 }
1893 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1894 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001895
Thierry Strudel3d639192016-09-09 11:52:26 -07001896 }
1897 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1898 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1899 switch (newStream->format) {
1900 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001901 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1902 depthPresent = true;
1903 break;
1904 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001905 stallStreamCnt++;
1906 if (isOnEncoder(maxViewfinderSize, newStream->width,
1907 newStream->height)) {
1908 numStreamsOnEncoder++;
1909 bJpegOnEncoder = true;
1910 }
1911 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1912 newStream->width);
1913 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1914 newStream->height);
1915 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1916 "FATAL: max_downscale_factor cannot be zero and so assert");
1917 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1918 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1919 LOGH("Setting small jpeg size flag to true");
1920 bSmallJpegSize = true;
1921 }
1922 break;
1923 case HAL_PIXEL_FORMAT_RAW10:
1924 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1925 case HAL_PIXEL_FORMAT_RAW16:
1926 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001927 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1928 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1929 pdStatCount++;
1930 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 break;
1932 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1933 processedStreamCnt++;
1934 if (isOnEncoder(maxViewfinderSize, newStream->width,
1935 newStream->height)) {
1936 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1937 !IS_USAGE_ZSL(newStream->usage)) {
1938 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1939 }
1940 numStreamsOnEncoder++;
1941 }
1942 break;
1943 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1944 processedStreamCnt++;
1945 if (isOnEncoder(maxViewfinderSize, newStream->width,
1946 newStream->height)) {
1947 // If Yuv888 size is not greater than 4K, set feature mask
1948 // to SUPERSET so that it support concurrent request on
1949 // YUV and JPEG.
1950 if (newStream->width <= VIDEO_4K_WIDTH &&
1951 newStream->height <= VIDEO_4K_HEIGHT) {
1952 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1953 }
1954 numStreamsOnEncoder++;
1955 numYuv888OnEncoder++;
1956 largeYuv888Size.width = newStream->width;
1957 largeYuv888Size.height = newStream->height;
1958 }
1959 break;
1960 default:
1961 processedStreamCnt++;
1962 if (isOnEncoder(maxViewfinderSize, newStream->width,
1963 newStream->height)) {
1964 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1965 numStreamsOnEncoder++;
1966 }
1967 break;
1968 }
1969
1970 }
1971 }
1972
1973 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1974 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1975 !m_bIsVideo) {
1976 m_bEisEnable = false;
1977 }
1978
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001979 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1980 pthread_mutex_unlock(&mMutex);
1981 return -EINVAL;
1982 }
1983
Thierry Strudel54dc9782017-02-15 12:12:10 -08001984 uint8_t forceEnableTnr = 0;
1985 char tnr_prop[PROPERTY_VALUE_MAX];
1986 memset(tnr_prop, 0, sizeof(tnr_prop));
1987 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1988 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1989
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 /* Logic to enable/disable TNR based on specific config size/etc.*/
1991 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1993 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001994 else if (forceEnableTnr)
1995 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001996
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001997 char videoHdrProp[PROPERTY_VALUE_MAX];
1998 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1999 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2000 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2001
2002 if (hdr_mode_prop == 1 && m_bIsVideo &&
2003 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2004 m_bVideoHdrEnabled = true;
2005 else
2006 m_bVideoHdrEnabled = false;
2007
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Check if num_streams is sane */
2010 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2011 rawStreamCnt > MAX_RAW_STREAMS ||
2012 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2013 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2014 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2015 pthread_mutex_unlock(&mMutex);
2016 return -EINVAL;
2017 }
2018 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002019 if (isZsl && m_bIs4KVideo) {
2020 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024 /* Check if stream sizes are sane */
2025 if (numStreamsOnEncoder > 2) {
2026 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2027 pthread_mutex_unlock(&mMutex);
2028 return -EINVAL;
2029 } else if (1 < numStreamsOnEncoder){
2030 bUseCommonFeatureMask = true;
2031 LOGH("Multiple streams above max viewfinder size, common mask needed");
2032 }
2033
2034 /* Check if BLOB size is greater than 4k in 4k recording case */
2035 if (m_bIs4KVideo && bJpegExceeds4K) {
2036 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2037 pthread_mutex_unlock(&mMutex);
2038 return -EINVAL;
2039 }
2040
Emilian Peev7650c122017-01-19 08:24:33 -08002041 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2042 depthPresent) {
2043 LOGE("HAL doesn't support depth streams in HFR mode!");
2044 pthread_mutex_unlock(&mMutex);
2045 return -EINVAL;
2046 }
2047
Thierry Strudel3d639192016-09-09 11:52:26 -07002048 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2049 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2050 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2051 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2052 // configurations:
2053 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2054 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2055 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2056 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2057 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2058 __func__);
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062
2063 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2064 // the YUV stream's size is greater or equal to the JPEG size, set common
2065 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2066 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2067 jpegSize.width, jpegSize.height) &&
2068 largeYuv888Size.width > jpegSize.width &&
2069 largeYuv888Size.height > jpegSize.height) {
2070 bYuv888OverrideJpeg = true;
2071 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2072 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2073 }
2074
2075 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2076 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2077 commonFeatureMask);
2078 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2079 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2080
2081 rc = validateStreamDimensions(streamList);
2082 if (rc == NO_ERROR) {
2083 rc = validateStreamRotations(streamList);
2084 }
2085 if (rc != NO_ERROR) {
2086 LOGE("Invalid stream configuration requested!");
2087 pthread_mutex_unlock(&mMutex);
2088 return rc;
2089 }
2090
Emilian Peev0f3c3162017-03-15 12:57:46 +00002091 if (1 < pdStatCount) {
2092 LOGE("HAL doesn't support multiple PD streams");
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096
2097 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2098 (1 == pdStatCount)) {
2099 LOGE("HAL doesn't support PD streams in HFR mode!");
2100 pthread_mutex_unlock(&mMutex);
2101 return -EINVAL;
2102 }
2103
Thierry Strudel3d639192016-09-09 11:52:26 -07002104 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2105 for (size_t i = 0; i < streamList->num_streams; i++) {
2106 camera3_stream_t *newStream = streamList->streams[i];
2107 LOGH("newStream type = %d, stream format = %d "
2108 "stream size : %d x %d, stream rotation = %d",
2109 newStream->stream_type, newStream->format,
2110 newStream->width, newStream->height, newStream->rotation);
2111 //if the stream is in the mStreamList validate it
2112 bool stream_exists = false;
2113 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2114 it != mStreamInfo.end(); it++) {
2115 if ((*it)->stream == newStream) {
2116 QCamera3ProcessingChannel *channel =
2117 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2118 stream_exists = true;
2119 if (channel)
2120 delete channel;
2121 (*it)->status = VALID;
2122 (*it)->stream->priv = NULL;
2123 (*it)->channel = NULL;
2124 }
2125 }
2126 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2127 //new stream
2128 stream_info_t* stream_info;
2129 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2130 if (!stream_info) {
2131 LOGE("Could not allocate stream info");
2132 rc = -ENOMEM;
2133 pthread_mutex_unlock(&mMutex);
2134 return rc;
2135 }
2136 stream_info->stream = newStream;
2137 stream_info->status = VALID;
2138 stream_info->channel = NULL;
2139 mStreamInfo.push_back(stream_info);
2140 }
2141 /* Covers Opaque ZSL and API1 F/W ZSL */
2142 if (IS_USAGE_ZSL(newStream->usage)
2143 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2144 if (zslStream != NULL) {
2145 LOGE("Multiple input/reprocess streams requested!");
2146 pthread_mutex_unlock(&mMutex);
2147 return BAD_VALUE;
2148 }
2149 zslStream = newStream;
2150 }
2151 /* Covers YUV reprocess */
2152 if (inputStream != NULL) {
2153 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2154 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2156 && inputStream->width == newStream->width
2157 && inputStream->height == newStream->height) {
2158 if (zslStream != NULL) {
2159 /* This scenario indicates multiple YUV streams with same size
2160 * as input stream have been requested, since zsl stream handle
2161 * is solely used for the purpose of overriding the size of streams
2162 * which share h/w streams we will just make a guess here as to
2163 * which of the stream is a ZSL stream, this will be refactored
2164 * once we make generic logic for streams sharing encoder output
2165 */
2166 LOGH("Warning, Multiple ip/reprocess streams requested!");
2167 }
2168 zslStream = newStream;
2169 }
2170 }
2171 }
2172
2173 /* If a zsl stream is set, we know that we have configured at least one input or
2174 bidirectional stream */
2175 if (NULL != zslStream) {
2176 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2177 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2178 mInputStreamInfo.format = zslStream->format;
2179 mInputStreamInfo.usage = zslStream->usage;
2180 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2181 mInputStreamInfo.dim.width,
2182 mInputStreamInfo.dim.height,
2183 mInputStreamInfo.format, mInputStreamInfo.usage);
2184 }
2185
2186 cleanAndSortStreamInfo();
2187 if (mMetadataChannel) {
2188 delete mMetadataChannel;
2189 mMetadataChannel = NULL;
2190 }
2191 if (mSupportChannel) {
2192 delete mSupportChannel;
2193 mSupportChannel = NULL;
2194 }
2195
2196 if (mAnalysisChannel) {
2197 delete mAnalysisChannel;
2198 mAnalysisChannel = NULL;
2199 }
2200
2201 if (mDummyBatchChannel) {
2202 delete mDummyBatchChannel;
2203 mDummyBatchChannel = NULL;
2204 }
2205
Emilian Peev7650c122017-01-19 08:24:33 -08002206 if (mDepthChannel) {
2207 mDepthChannel = NULL;
2208 }
2209
Thierry Strudel2896d122017-02-23 19:18:03 -08002210 char is_type_value[PROPERTY_VALUE_MAX];
2211 property_get("persist.camera.is_type", is_type_value, "4");
2212 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2213
Binhao Line406f062017-05-03 14:39:44 -07002214 char property_value[PROPERTY_VALUE_MAX];
2215 property_get("persist.camera.gzoom.at", property_value, "0");
2216 int goog_zoom_at = atoi(property_value);
2217 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2218 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2219
2220 property_get("persist.camera.gzoom.4k", property_value, "0");
2221 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
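    /* Illustrative note (assumption, not part of the original code): the
     * persist.camera.gzoom.at value is read as a bit mask, bit 0 enabling
     * Google zoom on the video stream and bit 1 on the preview stream, e.g.
     *   adb shell setprop persist.camera.gzoom.at 3   // video + preview
     * persist.camera.gzoom.4k additionally gates whether the video case is
     * honored for 4K recording. */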
2222
Thierry Strudel3d639192016-09-09 11:52:26 -07002223 //Create metadata channel and initialize it
2224 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2225 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2226 gCamCapability[mCameraId]->color_arrangement);
2227 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2228 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002229 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002230 if (mMetadataChannel == NULL) {
2231 LOGE("failed to allocate metadata channel");
2232 rc = -ENOMEM;
2233 pthread_mutex_unlock(&mMutex);
2234 return rc;
2235 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002236 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002237 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2238 if (rc < 0) {
2239 LOGE("metadata channel initialization failed");
2240 delete mMetadataChannel;
2241 mMetadataChannel = NULL;
2242 pthread_mutex_unlock(&mMutex);
2243 return rc;
2244 }
2245
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002248 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002249 // Keep track of preview/video streams indices.
2250 // There could be more than one preview stream, but only one video stream.
2251 int32_t video_stream_idx = -1;
2252 int32_t preview_stream_idx[streamList->num_streams];
2253 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002254 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2255 /* Allocate channel objects for the requested streams */
2256 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002257
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 camera3_stream_t *newStream = streamList->streams[i];
2259 uint32_t stream_usage = newStream->usage;
2260 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2261 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2262 struct camera_info *p_info = NULL;
2263 pthread_mutex_lock(&gCamLock);
2264 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2265 pthread_mutex_unlock(&gCamLock);
2266 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2267 || IS_USAGE_ZSL(newStream->usage)) &&
2268 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002269 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002270 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002271 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2272 if (bUseCommonFeatureMask)
2273 zsl_ppmask = commonFeatureMask;
2274 else
2275 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002277 if (numStreamsOnEncoder > 0)
2278 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2279 else
2280 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002281 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002283 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002284 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002285 LOGH("Input stream configured, reprocess config");
2286 } else {
2287 //for non zsl streams find out the format
2288 switch (newStream->format) {
2289 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2290 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002291 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2293 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2294 /* add additional features to pp feature mask */
2295 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2296 mStreamConfigInfo.num_streams);
2297
2298 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2299 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2300 CAM_STREAM_TYPE_VIDEO;
2301 if (m_bTnrEnabled && m_bTnrVideo) {
2302 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2303 CAM_QCOM_FEATURE_CPP_TNR;
2304 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2305 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2306 ~CAM_QCOM_FEATURE_CDS;
2307 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002308 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2309 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2310 CAM_QTI_FEATURE_PPEISCORE;
2311 }
Binhao Line406f062017-05-03 14:39:44 -07002312 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2313 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2314 CAM_QCOM_FEATURE_GOOG_ZOOM;
2315 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002316 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002317 } else {
2318 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2319 CAM_STREAM_TYPE_PREVIEW;
2320 if (m_bTnrEnabled && m_bTnrPreview) {
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2322 CAM_QCOM_FEATURE_CPP_TNR;
2323 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2325 ~CAM_QCOM_FEATURE_CDS;
2326 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002327 if(!m_bSwTnrPreview) {
2328 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2329 ~CAM_QTI_FEATURE_SW_TNR;
2330 }
Binhao Line406f062017-05-03 14:39:44 -07002331 if (is_goog_zoom_preview_enabled) {
2332 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2333 CAM_QCOM_FEATURE_GOOG_ZOOM;
2334 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002335 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002336 padding_info.width_padding = mSurfaceStridePadding;
2337 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002338 previewSize.width = (int32_t)newStream->width;
2339 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 }
2341 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2342 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2344 newStream->height;
2345 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2346 newStream->width;
2347 }
2348 }
2349 break;
2350 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2353 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2354 if (bUseCommonFeatureMask)
2355 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2356 commonFeatureMask;
2357 else
2358 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2359 CAM_QCOM_FEATURE_NONE;
2360 } else {
2361 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2362 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2363 }
2364 break;
2365 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002366 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2368 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2369 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2370 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2371 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002372 /* Remove rotation if it is not supported
2373 for 4K LiveVideo snapshot case (online processing) */
2374 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2375 CAM_QCOM_FEATURE_ROTATION)) {
2376 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2377 &= ~CAM_QCOM_FEATURE_ROTATION;
2378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 } else {
2380 if (bUseCommonFeatureMask &&
2381 isOnEncoder(maxViewfinderSize, newStream->width,
2382 newStream->height)) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2384 } else {
2385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2386 }
2387 }
2388 if (isZsl) {
2389 if (zslStream) {
2390 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2391 (int32_t)zslStream->width;
2392 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2393 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2395 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002396 } else {
2397 LOGE("Error, No ZSL stream identified");
2398 pthread_mutex_unlock(&mMutex);
2399 return -EINVAL;
2400 }
2401 } else if (m_bIs4KVideo) {
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2403 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2404 } else if (bYuv888OverrideJpeg) {
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2406 (int32_t)largeYuv888Size.width;
2407 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2408 (int32_t)largeYuv888Size.height;
2409 }
2410 break;
2411 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2412 case HAL_PIXEL_FORMAT_RAW16:
2413 case HAL_PIXEL_FORMAT_RAW10:
2414 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2416 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002417 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2418 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2419 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2420 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2421 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2422 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2423 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2424 gCamCapability[mCameraId]->dt[mPDIndex];
2425 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2426 gCamCapability[mCameraId]->vc[mPDIndex];
2427 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 break;
2429 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002430 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002431 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2433 break;
2434 }
2435 }
2436
2437 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2438 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2439 gCamCapability[mCameraId]->color_arrangement);
2440
2441 if (newStream->priv == NULL) {
2442 //New stream, construct channel
2443 switch (newStream->stream_type) {
2444 case CAMERA3_STREAM_INPUT:
2445 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2446 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2447 break;
2448 case CAMERA3_STREAM_BIDIRECTIONAL:
2449 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2450 GRALLOC_USAGE_HW_CAMERA_WRITE;
2451 break;
2452 case CAMERA3_STREAM_OUTPUT:
2453 /* For video encoding stream, set read/write rarely
2454 * flag so that they may be set to un-cached */
2455 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2456 newStream->usage |=
2457 (GRALLOC_USAGE_SW_READ_RARELY |
2458 GRALLOC_USAGE_SW_WRITE_RARELY |
2459 GRALLOC_USAGE_HW_CAMERA_WRITE);
2460 else if (IS_USAGE_ZSL(newStream->usage))
2461 {
2462 LOGD("ZSL usage flag skipping");
2463 }
2464 else if (newStream == zslStream
2465 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2466 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2467 } else
2468 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2469 break;
2470 default:
2471 LOGE("Invalid stream_type %d", newStream->stream_type);
2472 break;
2473 }
2474
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002475 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002476 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2477 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2478 QCamera3ProcessingChannel *channel = NULL;
2479 switch (newStream->format) {
2480 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2481 if ((newStream->usage &
2482 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2483 (streamList->operation_mode ==
2484 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2485 ) {
2486 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel,
2495 0); //heap buffers are not required for HFR video channel
2496 if (channel == NULL) {
2497 LOGE("allocation of channel failed");
2498 pthread_mutex_unlock(&mMutex);
2499 return -ENOMEM;
2500 }
2501 //channel->getNumBuffers() will return 0 here so use
2502 //MAX_INFLIGHT_HFR_REQUESTS
2503 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2504 newStream->priv = channel;
2505 LOGI("num video buffers in HFR mode: %d",
2506 MAX_INFLIGHT_HFR_REQUESTS);
2507 } else {
2508 /* Copy stream contents in HFR preview only case to create
2509 * dummy batch channel so that sensor streaming is in
2510 * HFR mode */
2511 if (!m_bIsVideo && (streamList->operation_mode ==
2512 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2513 mDummyBatchStream = *newStream;
2514 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 int bufferCount = MAX_INFLIGHT_REQUESTS;
2516 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2517 CAM_STREAM_TYPE_VIDEO) {
2518 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2519 bufferCount = MAX_VIDEO_BUFFERS;
2520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002521 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2522 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002523 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002524 this,
2525 newStream,
2526 (cam_stream_type_t)
2527 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2529 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002530 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 if (channel == NULL) {
2532 LOGE("allocation of channel failed");
2533 pthread_mutex_unlock(&mMutex);
2534 return -ENOMEM;
2535 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002536 /* disable UBWC for preview, though supported,
2537 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002538 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002539 (previewSize.width == (int32_t)videoWidth)&&
2540 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002541 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002542 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002543 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002544 /* When goog_zoom is linked to the preview or video stream,
2545 * disable ubwc to the linked stream */
2546 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2547 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2548 channel->setUBWCEnabled(false);
2549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 newStream->max_buffers = channel->getNumBuffers();
2551 newStream->priv = channel;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2555 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2556 mChannelHandle,
2557 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002558 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002559 this,
2560 newStream,
2561 (cam_stream_type_t)
2562 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2564 mMetadataChannel);
2565 if (channel == NULL) {
2566 LOGE("allocation of YUV channel failed");
2567 pthread_mutex_unlock(&mMutex);
2568 return -ENOMEM;
2569 }
2570 newStream->max_buffers = channel->getNumBuffers();
2571 newStream->priv = channel;
2572 break;
2573 }
2574 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2575 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002576 case HAL_PIXEL_FORMAT_RAW10: {
2577 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2578 (HAL_DATASPACE_DEPTH != newStream->data_space))
2579 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 mRawChannel = new QCamera3RawChannel(
2581 mCameraHandle->camera_handle, mChannelHandle,
2582 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002583 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002584 this, newStream,
2585 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002586 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002587 if (mRawChannel == NULL) {
2588 LOGE("allocation of raw channel failed");
2589 pthread_mutex_unlock(&mMutex);
2590 return -ENOMEM;
2591 }
2592 newStream->max_buffers = mRawChannel->getNumBuffers();
2593 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2594 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002597 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2598 mDepthChannel = new QCamera3DepthChannel(
2599 mCameraHandle->camera_handle, mChannelHandle,
2600 mCameraHandle->ops, NULL, NULL, &padding_info,
2601 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2602 mMetadataChannel);
2603 if (NULL == mDepthChannel) {
2604 LOGE("Allocation of depth channel failed");
2605 pthread_mutex_unlock(&mMutex);
2606 return NO_MEMORY;
2607 }
2608 newStream->priv = mDepthChannel;
2609 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2610 } else {
2611 // Max live snapshot inflight buffer is 1. This is to mitigate
2612 // frame drop issues for video snapshot. The more buffers being
2613 // allocated, the more frame drops there are.
2614 mPictureChannel = new QCamera3PicChannel(
2615 mCameraHandle->camera_handle, mChannelHandle,
2616 mCameraHandle->ops, captureResultCb,
2617 setBufferErrorStatus, &padding_info, this, newStream,
2618 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2619 m_bIs4KVideo, isZsl, mMetadataChannel,
2620 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2621 if (mPictureChannel == NULL) {
2622 LOGE("allocation of channel failed");
2623 pthread_mutex_unlock(&mMutex);
2624 return -ENOMEM;
2625 }
2626 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2627 newStream->max_buffers = mPictureChannel->getNumBuffers();
2628 mPictureChannel->overrideYuvSize(
2629 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2630 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 break;
2633
2634 default:
2635 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002636 pthread_mutex_unlock(&mMutex);
2637 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 }
2639 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2640 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2641 } else {
2642 LOGE("Error, Unknown stream type");
2643 pthread_mutex_unlock(&mMutex);
2644 return -EINVAL;
2645 }
2646
2647 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002648 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2649 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002650 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002651 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2653 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2654 }
2655 }
2656
2657 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2658 it != mStreamInfo.end(); it++) {
2659 if ((*it)->stream == newStream) {
2660 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2661 break;
2662 }
2663 }
2664 } else {
2665 // Channel already exists for this stream
2666 // Do nothing for now
2667 }
2668 padding_info = gCamCapability[mCameraId]->padding_info;
2669
Emilian Peev7650c122017-01-19 08:24:33 -08002670 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 * since there is no real stream associated with them
2672 */
Emilian Peev7650c122017-01-19 08:24:33 -08002673 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002674 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2675 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 }
2679
Binhao Lincdb362a2017-04-20 13:31:54 -07002680 // By default, preview stream TNR is disabled.
2681 // Enable TNR for the preview stream if all conditions below are satisfied:
2682 // 1. resolution <= 1080p.
2683 // 2. preview resolution == video resolution.
2684 // 3. video stream TNR is enabled.
2685 // 4. EIS 2.0 is in use (see the illustrative example below).
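        // Illustrative example: with TNR enabled for video (m_bTnrEnabled && m_bTnrVideo)
        // and EIS 2.0 in use, a 1920x1080 preview paired with a 1920x1080 video stream
        // gets CAM_QCOM_FEATURE_CPP_TNR added to its postprocess mask below, and
        // CAM_QCOM_FEATURE_CDS cleared since TNR and CDS are mutually exclusive.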
2686 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2687 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2688 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2689 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2690 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2691 video_stream->width == preview_stream->width &&
2692 video_stream->height == preview_stream->height) {
2693 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2694 CAM_QCOM_FEATURE_CPP_TNR;
2695 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2696 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2697 ~CAM_QCOM_FEATURE_CDS;
2698 }
2699 }
2700
Thierry Strudel2896d122017-02-23 19:18:03 -08002701 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2702 onlyRaw = false;
2703 }
2704
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002705 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002706 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002707 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002708 cam_analysis_info_t analysisInfo;
2709 int32_t ret = NO_ERROR;
2710 ret = mCommon.getAnalysisInfo(
2711 FALSE,
2712 analysisFeatureMask,
2713 &analysisInfo);
2714 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002715 cam_color_filter_arrangement_t analysis_color_arrangement =
2716 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2717 CAM_FILTER_ARRANGEMENT_Y :
2718 gCamCapability[mCameraId]->color_arrangement);
2719 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2720 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002721 cam_dimension_t analysisDim;
2722 analysisDim = mCommon.getMatchingDimension(previewSize,
2723 analysisInfo.analysis_recommended_res);
2724
2725 mAnalysisChannel = new QCamera3SupportChannel(
2726 mCameraHandle->camera_handle,
2727 mChannelHandle,
2728 mCameraHandle->ops,
2729 &analysisInfo.analysis_padding_info,
2730 analysisFeatureMask,
2731 CAM_STREAM_TYPE_ANALYSIS,
2732 &analysisDim,
2733 (analysisInfo.analysis_format
2734 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2735 : CAM_FORMAT_YUV_420_NV21),
2736 analysisInfo.hw_analysis_supported,
2737 gCamCapability[mCameraId]->color_arrangement,
2738 this,
2739 0); // force buffer count to 0
2740 } else {
2741 LOGW("getAnalysisInfo failed, ret = %d", ret);
2742 }
2743 if (!mAnalysisChannel) {
2744 LOGW("Analysis channel cannot be created");
2745 }
2746 }
2747
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 //RAW DUMP channel
2749 if (mEnableRawDump && isRawStreamRequested == false){
2750 cam_dimension_t rawDumpSize;
2751 rawDumpSize = getMaxRawSize(mCameraId);
2752 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2753 setPAAFSupport(rawDumpFeatureMask,
2754 CAM_STREAM_TYPE_RAW,
2755 gCamCapability[mCameraId]->color_arrangement);
2756 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2757 mChannelHandle,
2758 mCameraHandle->ops,
2759 rawDumpSize,
2760 &padding_info,
2761 this, rawDumpFeatureMask);
2762 if (!mRawDumpChannel) {
2763 LOGE("Raw Dump channel cannot be created");
2764 pthread_mutex_unlock(&mMutex);
2765 return -ENOMEM;
2766 }
2767 }
2768
Thierry Strudel3d639192016-09-09 11:52:26 -07002769 if (mAnalysisChannel) {
2770 cam_analysis_info_t analysisInfo;
2771 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2772 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2773 CAM_STREAM_TYPE_ANALYSIS;
2774 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2775 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002776 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002777 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2778 &analysisInfo);
2779 if (rc != NO_ERROR) {
2780 LOGE("getAnalysisInfo failed, ret = %d", rc);
2781 pthread_mutex_unlock(&mMutex);
2782 return rc;
2783 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002784 cam_color_filter_arrangement_t analysis_color_arrangement =
2785 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2786 CAM_FILTER_ARRANGEMENT_Y :
2787 gCamCapability[mCameraId]->color_arrangement);
2788 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2790 analysis_color_arrangement);
2791
Thierry Strudel3d639192016-09-09 11:52:26 -07002792 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002793 mCommon.getMatchingDimension(previewSize,
2794 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002795 mStreamConfigInfo.num_streams++;
2796 }
2797
Thierry Strudel2896d122017-02-23 19:18:03 -08002798 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002799 cam_analysis_info_t supportInfo;
2800 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2801 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2802 setPAAFSupport(callbackFeatureMask,
2803 CAM_STREAM_TYPE_CALLBACK,
2804 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002805 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002806 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002807 if (ret != NO_ERROR) {
2808 /* Ignore the error for Mono camera
2809 * because the PAAF bit mask is only set
2810 * for CAM_STREAM_TYPE_ANALYSIS stream type
2811 */
2812 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2813 LOGW("getAnalysisInfo failed, ret = %d", ret);
2814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002815 }
2816 mSupportChannel = new QCamera3SupportChannel(
2817 mCameraHandle->camera_handle,
2818 mChannelHandle,
2819 mCameraHandle->ops,
2820 &gCamCapability[mCameraId]->padding_info,
2821 callbackFeatureMask,
2822 CAM_STREAM_TYPE_CALLBACK,
2823 &QCamera3SupportChannel::kDim,
2824 CAM_FORMAT_YUV_420_NV21,
2825 supportInfo.hw_analysis_supported,
2826 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002827 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 if (!mSupportChannel) {
2829 LOGE("dummy channel cannot be created");
2830 pthread_mutex_unlock(&mMutex);
2831 return -ENOMEM;
2832 }
2833 }
2834
2835 if (mSupportChannel) {
2836 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2837 QCamera3SupportChannel::kDim;
2838 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2839 CAM_STREAM_TYPE_CALLBACK;
2840 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2841 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2842 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2843 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2844 gCamCapability[mCameraId]->color_arrangement);
2845 mStreamConfigInfo.num_streams++;
2846 }
2847
2848 if (mRawDumpChannel) {
2849 cam_dimension_t rawSize;
2850 rawSize = getMaxRawSize(mCameraId);
2851 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2852 rawSize;
2853 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2854 CAM_STREAM_TYPE_RAW;
2855 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2856 CAM_QCOM_FEATURE_NONE;
2857 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2858 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2859 gCamCapability[mCameraId]->color_arrangement);
2860 mStreamConfigInfo.num_streams++;
2861 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002862
2863 if (mHdrPlusRawSrcChannel) {
2864 cam_dimension_t rawSize;
2865 rawSize = getMaxRawSize(mCameraId);
2866 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2868 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2869 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2870 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2871 gCamCapability[mCameraId]->color_arrangement);
2872 mStreamConfigInfo.num_streams++;
2873 }
2874
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 /* In HFR mode, if a video stream is not added, create a dummy channel so that
2876 * the ISP can run in batch mode even for the preview-only case. This channel is
2877 * never 'start'ed (no stream-on), it is only 'initialized'. */
2878 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2879 !m_bIsVideo) {
2880 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2881 setPAAFSupport(dummyFeatureMask,
2882 CAM_STREAM_TYPE_VIDEO,
2883 gCamCapability[mCameraId]->color_arrangement);
2884 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2885 mChannelHandle,
2886 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002887 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002888 this,
2889 &mDummyBatchStream,
2890 CAM_STREAM_TYPE_VIDEO,
2891 dummyFeatureMask,
2892 mMetadataChannel);
2893 if (NULL == mDummyBatchChannel) {
2894 LOGE("creation of mDummyBatchChannel failed."
2895 "Preview will use non-hfr sensor mode ");
2896 }
2897 }
2898 if (mDummyBatchChannel) {
2899 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2900 mDummyBatchStream.width;
2901 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2902 mDummyBatchStream.height;
2903 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2904 CAM_STREAM_TYPE_VIDEO;
2905 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2906 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2907 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2908 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2909 gCamCapability[mCameraId]->color_arrangement);
2910 mStreamConfigInfo.num_streams++;
2911 }
2912
2913 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2914 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002915 m_bIs4KVideo ? 0 :
2916 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002917
2918 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2919 for (pendingRequestIterator i = mPendingRequestsList.begin();
2920 i != mPendingRequestsList.end();) {
2921 i = erasePendingRequest(i);
2922 }
2923 mPendingFrameDropList.clear();
2924 // Initialize/Reset the pending buffers list
2925 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2926 req.mPendingBufferList.clear();
2927 }
2928 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2929
Thierry Strudel3d639192016-09-09 11:52:26 -07002930 mCurJpegMeta.clear();
2931 //Get min frame duration for this streams configuration
2932 deriveMinFrameDuration();
2933
Chien-Yu Chenee335912017-02-09 17:53:20 -08002934 mFirstPreviewIntentSeen = false;
2935
2936 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002937 {
2938 Mutex::Autolock l(gHdrPlusClientLock);
2939 disableHdrPlusModeLocked();
2940 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002941
Thierry Strudel3d639192016-09-09 11:52:26 -07002942 // Update state
2943 mState = CONFIGURED;
2944
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002945 mFirstMetadataCallback = true;
2946
Thierry Strudel3d639192016-09-09 11:52:26 -07002947 pthread_mutex_unlock(&mMutex);
2948
2949 return rc;
2950}
2951
2952/*===========================================================================
2953 * FUNCTION : validateCaptureRequest
2954 *
2955 * DESCRIPTION: validate a capture request from camera service
2956 *
2957 * PARAMETERS :
2958 * @request : request from framework to process
2959 *
2960 * RETURN :
2961 *
2962 *==========================================================================*/
2963int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002964 camera3_capture_request_t *request,
2965 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002966{
2967 ssize_t idx = 0;
2968 const camera3_stream_buffer_t *b;
2969 CameraMetadata meta;
2970
2971 /* Sanity check the request */
2972 if (request == NULL) {
2973 LOGE("NULL capture request");
2974 return BAD_VALUE;
2975 }
2976
2977 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2978 /*settings cannot be null for the first request*/
2979 return BAD_VALUE;
2980 }
2981
2982 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002983 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2984 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 LOGE("Request %d: No output buffers provided!",
2986 frameNumber);
2987 return BAD_VALUE;
2988 }
2989 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2990 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2991 request->num_output_buffers, MAX_NUM_STREAMS);
2992 return BAD_VALUE;
2993 }
2994 if (request->input_buffer != NULL) {
2995 b = request->input_buffer;
2996 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2997 LOGE("Request %d: Buffer %ld: Status not OK!",
2998 frameNumber, (long)idx);
2999 return BAD_VALUE;
3000 }
3001 if (b->release_fence != -1) {
3002 LOGE("Request %d: Buffer %ld: Has a release fence!",
3003 frameNumber, (long)idx);
3004 return BAD_VALUE;
3005 }
3006 if (b->buffer == NULL) {
3007 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3008 frameNumber, (long)idx);
3009 return BAD_VALUE;
3010 }
3011 }
3012
3013 // Validate all buffers
3014 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003015 if (b == NULL) {
3016 return BAD_VALUE;
3017 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003018 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003019 QCamera3ProcessingChannel *channel =
3020 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3021 if (channel == NULL) {
3022 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3023 frameNumber, (long)idx);
3024 return BAD_VALUE;
3025 }
3026 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3027 LOGE("Request %d: Buffer %ld: Status not OK!",
3028 frameNumber, (long)idx);
3029 return BAD_VALUE;
3030 }
3031 if (b->release_fence != -1) {
3032 LOGE("Request %d: Buffer %ld: Has a release fence!",
3033 frameNumber, (long)idx);
3034 return BAD_VALUE;
3035 }
3036 if (b->buffer == NULL) {
3037 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3038 frameNumber, (long)idx);
3039 return BAD_VALUE;
3040 }
3041 if (*(b->buffer) == NULL) {
3042 LOGE("Request %d: Buffer %ld: NULL private handle!",
3043 frameNumber, (long)idx);
3044 return BAD_VALUE;
3045 }
3046 idx++;
3047 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003048 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003049 return NO_ERROR;
3050}
3051
3052/*===========================================================================
3053 * FUNCTION : deriveMinFrameDuration
3054 *
3055 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3056 * on currently configured streams.
3057 *
3058 * PARAMETERS : NONE
3059 *
3060 * RETURN : NONE
3061 *
3062 *==========================================================================*/
3063void QCamera3HardwareInterface::deriveMinFrameDuration()
3064{
3065 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3066
3067 maxJpegDim = 0;
3068 maxProcessedDim = 0;
3069 maxRawDim = 0;
3070
3071 // Figure out maximum jpeg, processed, and raw dimensions
3072 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3073 it != mStreamInfo.end(); it++) {
3074
3075 // Input stream doesn't have valid stream_type
3076 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3077 continue;
3078
3079 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3080 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3081 if (dimension > maxJpegDim)
3082 maxJpegDim = dimension;
3083 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3084 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3085 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3086 if (dimension > maxRawDim)
3087 maxRawDim = dimension;
3088 } else {
3089 if (dimension > maxProcessedDim)
3090 maxProcessedDim = dimension;
3091 }
3092 }
3093
3094 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3095 MAX_SIZES_CNT);
3096
3097 //Assume all jpeg dimensions are in processed dimensions.
3098 if (maxJpegDim > maxProcessedDim)
3099 maxProcessedDim = maxJpegDim;
3100 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3101 if (maxProcessedDim > maxRawDim) {
3102 maxRawDim = INT32_MAX;
3103
3104 for (size_t i = 0; i < count; i++) {
3105 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3106 gCamCapability[mCameraId]->raw_dim[i].height;
3107 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3108 maxRawDim = dimension;
3109 }
3110 }
3111
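    // Illustrative example (assumed sensor table): with a 12 MP processed/JPEG stream
    // and advertised raw sizes of 10 MP and 16 MP, the 16 MP entry is the smallest raw
    // dimension >= the max processed dimension, so its raw_min_duration is picked up
    // as mMinRawFrameDuration in the loop below.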
3112 //Find minimum durations for processed, jpeg, and raw
3113 for (size_t i = 0; i < count; i++) {
3114 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3115 gCamCapability[mCameraId]->raw_dim[i].height) {
3116 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3117 break;
3118 }
3119 }
3120 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3121 for (size_t i = 0; i < count; i++) {
3122 if (maxProcessedDim ==
3123 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3124 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3125 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3126 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3127 break;
3128 }
3129 }
3130}
3131
3132/*===========================================================================
3133 * FUNCTION : getMinFrameDuration
3134 *
3135 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3136 * derived for the configured streams and the current request configuration.
3137 *
3138 * PARAMETERS : @request: request sent by the frameworks
3139 *
3140 * RETURN : min frame duration for a particular request
3141 *
3142 *==========================================================================*/
3143int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3144{
3145 bool hasJpegStream = false;
3146 bool hasRawStream = false;
3147 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3148 const camera3_stream_t *stream = request->output_buffers[i].stream;
3149 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3150 hasJpegStream = true;
3151 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3152 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3153 stream->format == HAL_PIXEL_FORMAT_RAW16)
3154 hasRawStream = true;
3155 }
3156
3157 if (!hasJpegStream)
3158 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3159 else
3160 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3161}
3162
3163/*===========================================================================
3164 * FUNCTION : handleBuffersDuringFlushLock
3165 *
3166 * DESCRIPTION: Account for buffers returned from back-end during flush
3167 * This function is executed while mMutex is held by the caller.
3168 *
3169 * PARAMETERS :
3170 * @buffer: image buffer for the callback
3171 *
3172 * RETURN :
3173 *==========================================================================*/
3174void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3175{
3176 bool buffer_found = false;
3177 for (List<PendingBuffersInRequest>::iterator req =
3178 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3179 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3180 for (List<PendingBufferInfo>::iterator i =
3181 req->mPendingBufferList.begin();
3182 i != req->mPendingBufferList.end(); i++) {
3183 if (i->buffer == buffer->buffer) {
3184 mPendingBuffersMap.numPendingBufsAtFlush--;
3185 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3186 buffer->buffer, req->frame_number,
3187 mPendingBuffersMap.numPendingBufsAtFlush);
3188 buffer_found = true;
3189 break;
3190 }
3191 }
3192 if (buffer_found) {
3193 break;
3194 }
3195 }
3196 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3197 //signal the flush()
3198 LOGD("All buffers returned to HAL. Continue flush");
3199 pthread_cond_signal(&mBuffersCond);
3200 }
3201}
3202
Thierry Strudel3d639192016-09-09 11:52:26 -07003203/*===========================================================================
3204 * FUNCTION : handleBatchMetadata
3205 *
3206 * DESCRIPTION: Handles metadata buffer callback in batch mode
3207 *
3208 * PARAMETERS : @metadata_buf: metadata buffer
3209 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3210 * the meta buf in this method
3211 *
3212 * RETURN :
3213 *
3214 *==========================================================================*/
3215void QCamera3HardwareInterface::handleBatchMetadata(
3216 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3217{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003218 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003219
3220 if (NULL == metadata_buf) {
3221 LOGE("metadata_buf is NULL");
3222 return;
3223 }
3224 /* In batch mode, the metadata will contain the frame number and timestamp of
3225 * the last frame in the batch. Eg: a batch containing buffers from request
3226 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3227 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3228 * multiple process_capture_results */
3229 metadata_buffer_t *metadata =
3230 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3231 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3232 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3233 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3234 uint32_t frame_number = 0, urgent_frame_number = 0;
3235 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3236 bool invalid_metadata = false;
3237 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3238 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003239 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003240
3241 int32_t *p_frame_number_valid =
3242 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3243 uint32_t *p_frame_number =
3244 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3245 int64_t *p_capture_time =
3246 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3247 int32_t *p_urgent_frame_number_valid =
3248 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3249 uint32_t *p_urgent_frame_number =
3250 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3251
3252 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3253 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3254 (NULL == p_urgent_frame_number)) {
3255 LOGE("Invalid metadata");
3256 invalid_metadata = true;
3257 } else {
3258 frame_number_valid = *p_frame_number_valid;
3259 last_frame_number = *p_frame_number;
3260 last_frame_capture_time = *p_capture_time;
3261 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3262 last_urgent_frame_number = *p_urgent_frame_number;
3263 }
3264
3265 /* In batch mode, when no video buffers are requested, set_parms are sent
3266 * for every capture_request. The difference between consecutive urgent
3267 * frame numbers and frame numbers should be used to interpolate the
3268 * corresponding frame numbers and time stamps */
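    /* Illustrative example: if this metadata reports last_frame_number 8 while
     * mPendingBatchMap recorded first_frame_number 5 for this batch, then
     * frameNumDiff = 8 + 1 - 5 = 4 and results are generated for frames 5..8
     * in the replay loop further below. */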
3269 pthread_mutex_lock(&mMutex);
3270 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003271 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3272 if(idx < 0) {
3273 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3274 last_urgent_frame_number);
3275 mState = ERROR;
3276 pthread_mutex_unlock(&mMutex);
3277 return;
3278 }
3279 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003280 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3281 first_urgent_frame_number;
3282
3283 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3284 urgent_frame_number_valid,
3285 first_urgent_frame_number, last_urgent_frame_number);
3286 }
3287
3288 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003289 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3290 if(idx < 0) {
3291 LOGE("Invalid frame number received: %d. Irrecoverable error",
3292 last_frame_number);
3293 mState = ERROR;
3294 pthread_mutex_unlock(&mMutex);
3295 return;
3296 }
3297 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003298 frameNumDiff = last_frame_number + 1 -
3299 first_frame_number;
3300 mPendingBatchMap.removeItem(last_frame_number);
3301
3302 LOGD("frm: valid: %d frm_num: %d - %d",
3303 frame_number_valid,
3304 first_frame_number, last_frame_number);
3305
3306 }
3307 pthread_mutex_unlock(&mMutex);
3308
3309 if (urgent_frame_number_valid || frame_number_valid) {
3310 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3311 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3312 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3313 urgentFrameNumDiff, last_urgent_frame_number);
3314 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3315 LOGE("frameNumDiff: %d frameNum: %d",
3316 frameNumDiff, last_frame_number);
3317 }
3318
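    /* Replay the batch: for each inferred slot, patch the shared metadata buffer
     * in place with the interpolated frame number and timestamp, then dispatch it
     * once per slot through handleMetadataWithLock(). */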
3319 for (size_t i = 0; i < loopCount; i++) {
3320 /* handleMetadataWithLock is called even for invalid_metadata for
3321 * pipeline depth calculation */
3322 if (!invalid_metadata) {
3323 /* Infer frame number. Batch metadata contains frame number of the
3324 * last frame */
3325 if (urgent_frame_number_valid) {
3326 if (i < urgentFrameNumDiff) {
3327 urgent_frame_number =
3328 first_urgent_frame_number + i;
3329 LOGD("inferred urgent frame_number: %d",
3330 urgent_frame_number);
3331 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3332 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3333 } else {
3334 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3335 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3336 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3337 }
3338 }
3339
3340 /* Infer frame number. Batch metadata contains frame number of the
3341 * last frame */
3342 if (frame_number_valid) {
3343 if (i < frameNumDiff) {
3344 frame_number = first_frame_number + i;
3345 LOGD("inferred frame_number: %d", frame_number);
3346 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3347 CAM_INTF_META_FRAME_NUMBER, frame_number);
3348 } else {
3349 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3350 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3351 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3352 }
3353 }
3354
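            /* Timestamp inference (illustrative numbers): with mHFRVideoFps = 120 and
             * a batch of 4 frames, inferred timestamps are spaced NSEC_PER_SEC / 120
             * (~8.33 ms) apart, counting back from the timestamp of the last frame
             * reported in the batch metadata. */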
3355 if (last_frame_capture_time) {
3356 //Infer timestamp
3357 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003358 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003359 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003360 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003361 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3362 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3363 LOGD("batch capture_time: %lld, capture_time: %lld",
3364 last_frame_capture_time, capture_time);
3365 }
3366 }
3367 pthread_mutex_lock(&mMutex);
3368 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003369 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003370 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3371 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003372 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003373 pthread_mutex_unlock(&mMutex);
3374 }
3375
3376 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003377 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003378 mMetadataChannel->bufDone(metadata_buf);
3379 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003380 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 }
3382}
3383
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003384void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3385 camera3_error_msg_code_t errorCode)
3386{
3387 camera3_notify_msg_t notify_msg;
3388 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3389 notify_msg.type = CAMERA3_MSG_ERROR;
3390 notify_msg.message.error.error_code = errorCode;
3391 notify_msg.message.error.error_stream = NULL;
3392 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003393 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003394
3395 return;
3396}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003397
3398/*===========================================================================
3399 * FUNCTION : sendPartialMetadataWithLock
3400 *
3401 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3402 *
3403 * PARAMETERS : @metadata: metadata buffer
3404 * @requestIter: The iterator for the pending capture request for
3405 * which the partial result is being sent
3406 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3407 * last urgent metadata in a batch. Always true for non-batch mode
3408 *
3409 * RETURN :
3410 *
3411 *==========================================================================*/
3412
3413void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3414 metadata_buffer_t *metadata,
3415 const pendingRequestIterator requestIter,
3416 bool lastUrgentMetadataInBatch)
3417{
3418 camera3_capture_result_t result;
3419 memset(&result, 0, sizeof(camera3_capture_result_t));
3420
3421 requestIter->partial_result_cnt++;
3422
3423 // Extract 3A metadata
3424 result.result = translateCbUrgentMetadataToResultMetadata(
3425 metadata, lastUrgentMetadataInBatch);
3426 // Populate metadata result
3427 result.frame_number = requestIter->frame_number;
3428 result.num_output_buffers = 0;
3429 result.output_buffers = NULL;
3430 result.partial_result = requestIter->partial_result_cnt;
3431
3432 {
3433 Mutex::Autolock l(gHdrPlusClientLock);
3434 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3435 // Notify HDR+ client about the partial metadata.
3436 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3437 result.partial_result == PARTIAL_RESULT_COUNT);
3438 }
3439 }
3440
3441 orchestrateResult(&result);
3442 LOGD("urgent frame_number = %u", result.frame_number);
3443 free_camera_metadata((camera_metadata_t *)result.result);
3444}
3445
Thierry Strudel3d639192016-09-09 11:52:26 -07003446/*===========================================================================
3447 * FUNCTION : handleMetadataWithLock
3448 *
3449 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3450 *
3451 * PARAMETERS : @metadata_buf: metadata buffer
3452 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3453 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003454 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3455 * last urgent metadata in a batch. Always true for non-batch mode
3456 * @lastMetadataInBatch: Boolean to indicate whether this is the
3457 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003458 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3459 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003460 *
3461 * RETURN :
3462 *
3463 *==========================================================================*/
3464void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003465 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003466 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3467 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003468{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003469 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003470 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3471 //during flush do not send metadata from this thread
3472 LOGD("not sending metadata during flush or when mState is error");
3473 if (free_and_bufdone_meta_buf) {
3474 mMetadataChannel->bufDone(metadata_buf);
3475 free(metadata_buf);
3476 }
3477 return;
3478 }
3479
3480 //not in flush
3481 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3482 int32_t frame_number_valid, urgent_frame_number_valid;
3483 uint32_t frame_number, urgent_frame_number;
3484 int64_t capture_time;
3485 nsecs_t currentSysTime;
3486
3487 int32_t *p_frame_number_valid =
3488 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3489 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3490 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3491 int32_t *p_urgent_frame_number_valid =
3492 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3493 uint32_t *p_urgent_frame_number =
3494 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3495 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3496 metadata) {
3497 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3498 *p_frame_number_valid, *p_frame_number);
3499 }
3500
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003501 camera_metadata_t *resultMetadata = nullptr;
3502
Thierry Strudel3d639192016-09-09 11:52:26 -07003503 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3504 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3505 LOGE("Invalid metadata");
3506 if (free_and_bufdone_meta_buf) {
3507 mMetadataChannel->bufDone(metadata_buf);
3508 free(metadata_buf);
3509 }
3510 goto done_metadata;
3511 }
3512 frame_number_valid = *p_frame_number_valid;
3513 frame_number = *p_frame_number;
3514 capture_time = *p_capture_time;
3515 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3516 urgent_frame_number = *p_urgent_frame_number;
3517 currentSysTime = systemTime(CLOCK_MONOTONIC);
3518
3519 // Detect if buffers from any requests are overdue
3520 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003521 int64_t timeout;
3522 {
3523 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3524 // If there is a pending HDR+ request, the following requests may be blocked until the
3525 // HDR+ request is done. So allow a longer timeout.
3526 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3527 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3528 }
3529
3530 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003531 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003532 assert(missed.stream->priv);
3533 if (missed.stream->priv) {
3534 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3535 assert(ch->mStreams[0]);
3536 if (ch->mStreams[0]) {
3537 LOGE("Cancel missing frame = %d, buffer = %p,"
3538 "stream type = %d, stream format = %d",
3539 req.frame_number, missed.buffer,
3540 ch->mStreams[0]->getMyType(), missed.stream->format);
3541 ch->timeoutFrame(req.frame_number);
3542 }
3543 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003544 }
3545 }
3546 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 //For the very first metadata callback, regardless of whether it contains a valid
3548 //frame number, send the partial metadata for the jumpstarting requests.
3549 //Note that this has to be done even if the metadata doesn't contain a valid
3550 //urgent frame number, because in the case where only 1 request is ever submitted
3551 //to the HAL, there won't be a subsequent valid urgent frame number.
3552 if (mFirstMetadataCallback) {
3553 for (pendingRequestIterator i =
3554 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3555 if (i->bUseFirstPartial) {
3556 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3557 }
3558 }
3559 mFirstMetadataCallback = false;
3560 }
3561
Thierry Strudel3d639192016-09-09 11:52:26 -07003562 //Partial result on process_capture_result for timestamp
3563 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003564 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003565
3566 //Recieved an urgent Frame Number, handle it
3567 //using partial results
3568 for (pendingRequestIterator i =
3569 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3570 LOGD("Iterator Frame = %d urgent frame = %d",
3571 i->frame_number, urgent_frame_number);
3572
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003573 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 (i->partial_result_cnt == 0)) {
3575 LOGE("Error: HAL missed urgent metadata for frame number %d",
3576 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003577 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003578 }
3579
3580 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003581 i->partial_result_cnt == 0) {
3582 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003583 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3584 // Instant AEC settled for this frame.
3585 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3586 mInstantAECSettledFrameNumber = urgent_frame_number;
3587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003588 break;
3589 }
3590 }
3591 }
3592
3593 if (!frame_number_valid) {
3594 LOGD("Not a valid normal frame number, used as SOF only");
3595 if (free_and_bufdone_meta_buf) {
3596 mMetadataChannel->bufDone(metadata_buf);
3597 free(metadata_buf);
3598 }
3599 goto done_metadata;
3600 }
3601 LOGH("valid frame_number = %u, capture_time = %lld",
3602 frame_number, capture_time);
3603
Emilian Peev7650c122017-01-19 08:24:33 -08003604 if (metadata->is_depth_data_valid) {
3605 handleDepthDataLocked(metadata->depth_data, frame_number);
3606 }
3607
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003608 // Check whether any stream buffer corresponding to this frame is dropped or not.
3609 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3610 // Also, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3611 for (auto & pendingRequest : mPendingRequestsList) {
3612 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3613 mInstantAECSettledFrameNumber)) {
3614 camera3_notify_msg_t notify_msg = {};
3615 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003616 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 QCamera3ProcessingChannel *channel =
3618 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003619 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003620 if (p_cam_frame_drop) {
3621 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003622 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003623 // Got the stream ID for drop frame.
3624 dropFrame = true;
3625 break;
3626 }
3627 }
3628 } else {
3629 // This is instant AEC case.
3630 // For instant AEC drop the stream untill AEC is settled.
3631 dropFrame = true;
3632 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003634 if (dropFrame) {
3635 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3636 if (p_cam_frame_drop) {
3637 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003638 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003639 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003640 } else {
3641 // For instant AEC, inform frame drop and frame number
3642 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3643 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 pendingRequest.frame_number, streamID,
3645 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003646 }
3647 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003649 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003650 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003651 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003652 if (p_cam_frame_drop) {
3653 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003654 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003656 } else {
3657 // For instant AEC, inform frame drop and frame number
3658 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3659 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003660 pendingRequest.frame_number, streamID,
3661 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003662 }
3663 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003664 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003665 PendingFrameDrop.stream_ID = streamID;
3666 // Add the Frame drop info to mPendingFrameDropList
3667 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003669 }
3670 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003671 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003672
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 for (auto & pendingRequest : mPendingRequestsList) {
3674 // Find the pending request with the frame number.
3675 if (pendingRequest.frame_number == frame_number) {
3676 // Update the sensor timestamp.
3677 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003678
Thierry Strudel3d639192016-09-09 11:52:26 -07003679
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003680 /* Set the timestamp in display metadata so that clients aware of
3681 private_handle, such as VT, can use these unmodified timestamps.
3682 The camera framework is unaware of this timestamp and cannot change it. */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003683 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003684
Thierry Strudel3d639192016-09-09 11:52:26 -07003685 // Find channel requiring metadata, meaning internal offline postprocess
3686 // is needed.
3687 //TODO: for now, we don't support two streams requiring metadata at the same time.
3688 // (because we are not making copies, and the metadata buffer is not reference counted.)
3689 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3691 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 if (iter->need_metadata) {
3693 internalPproc = true;
3694 QCamera3ProcessingChannel *channel =
3695 (QCamera3ProcessingChannel *)iter->stream->priv;
3696 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003697 if(p_is_metabuf_queued != NULL) {
3698 *p_is_metabuf_queued = true;
3699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003700 break;
3701 }
3702 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003703 for (auto itr = pendingRequest.internalRequestList.begin();
3704 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003705 if (itr->need_metadata) {
3706 internalPproc = true;
3707 QCamera3ProcessingChannel *channel =
3708 (QCamera3ProcessingChannel *)itr->stream->priv;
3709 channel->queueReprocMetadata(metadata_buf);
3710 break;
3711 }
3712 }
3713
Thierry Strudel54dc9782017-02-15 12:12:10 -08003714 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003715
3716 bool *enableZsl = nullptr;
3717 if (gExposeEnableZslKey) {
3718 enableZsl = &pendingRequest.enableZsl;
3719 }
3720
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003721 resultMetadata = translateFromHalMetadata(metadata,
3722 pendingRequest.timestamp, pendingRequest.request_id,
3723 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3724 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003725 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003726 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003727 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003728 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003729 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003730 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003731
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003732 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003733
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734 if (pendingRequest.blob_request) {
3735 //Dump tuning metadata if enabled and available
3736 char prop[PROPERTY_VALUE_MAX];
3737 memset(prop, 0, sizeof(prop));
3738 property_get("persist.camera.dumpmetadata", prop, "0");
3739 int32_t enabled = atoi(prop);
3740 if (enabled && metadata->is_tuning_params_valid) {
3741 dumpMetadataToFile(metadata->tuning_params,
3742 mMetaFrameCount,
3743 enabled,
3744 "Snapshot",
3745 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003746 }
3747 }
3748
3749 if (!internalPproc) {
3750 LOGD("couldn't find need_metadata for this metadata");
3751 // Return metadata buffer
3752 if (free_and_bufdone_meta_buf) {
3753 mMetadataChannel->bufDone(metadata_buf);
3754 free(metadata_buf);
3755 }
3756 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003757
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003758 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003759 }
3760 }
3761
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 // Try to send out shutter callbacks and capture results.
3763 handlePendingResultsWithLock(frame_number, resultMetadata);
3764 return;
3765
Thierry Strudel3d639192016-09-09 11:52:26 -07003766done_metadata:
3767 for (pendingRequestIterator i = mPendingRequestsList.begin();
3768 i != mPendingRequestsList.end() ;i++) {
3769 i->pipeline_depth++;
3770 }
3771 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3772 unblockRequestIfNecessary();
3773}
3774
3775/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003776 * FUNCTION : handleDepthDataLocked
3777 *
3778 * DESCRIPTION: Handles incoming depth data
3779 *
3780 * PARAMETERS : @depthData : Depth data
3781 * @frameNumber: Frame number of the incoming depth data
3782 *
3783 * RETURN :
3784 *
3785 *==========================================================================*/
3786void QCamera3HardwareInterface::handleDepthDataLocked(
3787 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3788 uint32_t currentFrameNumber;
3789 buffer_handle_t *depthBuffer;
3790
3791 if (nullptr == mDepthChannel) {
3792 LOGE("Depth channel not present!");
3793 return;
3794 }
3795
3796 camera3_stream_buffer_t resultBuffer =
3797 {.acquire_fence = -1,
3798 .release_fence = -1,
3799 .status = CAMERA3_BUFFER_STATUS_OK,
3800 .buffer = nullptr,
3801 .stream = mDepthChannel->getStream()};
3802 camera3_capture_result_t result =
3803 {.result = nullptr,
3804 .num_output_buffers = 1,
3805 .output_buffers = &resultBuffer,
3806 .partial_result = 0,
3807 .frame_number = 0};
3808
3809 do {
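    /* Drain pending depth buffers in frame-number order: buffers older than the
     * incoming frameNumber are returned with a buffer-error notification, the
     * buffer matching frameNumber is populated with depthData, and newer buffers
     * are left pending. */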
3810 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3811 if (nullptr == depthBuffer) {
3812 break;
3813 }
3814
3815 result.frame_number = currentFrameNumber;
3816 resultBuffer.buffer = depthBuffer;
3817 if (currentFrameNumber == frameNumber) {
3818 int32_t rc = mDepthChannel->populateDepthData(depthData,
3819 frameNumber);
3820 if (NO_ERROR != rc) {
3821 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3822 } else {
3823 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3824 }
3825 } else if (currentFrameNumber > frameNumber) {
3826 break;
3827 } else {
3828 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3829 {{currentFrameNumber, mDepthChannel->getStream(),
3830 CAMERA3_MSG_ERROR_BUFFER}}};
3831 orchestrateNotify(&notify_msg);
3832
3833 LOGE("Depth buffer for frame number: %d is missing "
3834 "returning back!", currentFrameNumber);
3835 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3836 }
3837 mDepthChannel->unmapBuffer(currentFrameNumber);
3838
3839 orchestrateResult(&result);
3840 } while (currentFrameNumber < frameNumber);
3841}
3842
3843/*===========================================================================
3844 * FUNCTION : notifyErrorFoPendingDepthData
3845 *
3846 * DESCRIPTION: Returns error for any pending depth buffers
3847 *
3848 * PARAMETERS : depthCh - depth channel that needs to get flushed
3849 *
3850 * RETURN :
3851 *
3852 *==========================================================================*/
3853void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3854 QCamera3DepthChannel *depthCh) {
3855 uint32_t currentFrameNumber;
3856 buffer_handle_t *depthBuffer;
3857
3858 if (nullptr == depthCh) {
3859 return;
3860 }
3861
3862 camera3_notify_msg_t notify_msg =
3863 {.type = CAMERA3_MSG_ERROR,
3864 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3865 camera3_stream_buffer_t resultBuffer =
3866 {.acquire_fence = -1,
3867 .release_fence = -1,
3868 .buffer = nullptr,
3869 .stream = depthCh->getStream(),
3870 .status = CAMERA3_BUFFER_STATUS_ERROR};
3871 camera3_capture_result_t result =
3872 {.result = nullptr,
3873 .frame_number = 0,
3874 .num_output_buffers = 1,
3875 .partial_result = 0,
3876 .output_buffers = &resultBuffer};
3877
3878 while (nullptr !=
3879 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3880 depthCh->unmapBuffer(currentFrameNumber);
3881
3882 notify_msg.message.error.frame_number = currentFrameNumber;
3883 orchestrateNotify(&notify_msg);
3884
3885 resultBuffer.buffer = depthBuffer;
3886 result.frame_number = currentFrameNumber;
3887 orchestrateResult(&result);
3888 };
3889}
3890
3891/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003892 * FUNCTION : hdrPlusPerfLock
3893 *
3894 * DESCRIPTION: perf lock for HDR+ using custom intent
3895 *
3896 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3897 *
3898 * RETURN : None
3899 *
3900 *==========================================================================*/
3901void QCamera3HardwareInterface::hdrPlusPerfLock(
3902 mm_camera_super_buf_t *metadata_buf)
3903{
3904 if (NULL == metadata_buf) {
3905 LOGE("metadata_buf is NULL");
3906 return;
3907 }
3908 metadata_buffer_t *metadata =
3909 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3910 int32_t *p_frame_number_valid =
3911 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3912 uint32_t *p_frame_number =
3913 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3914
3915 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3916 LOGE("%s: Invalid metadata", __func__);
3917 return;
3918 }
3919
3920 //acquire perf lock for 5 sec after the last HDR frame is captured
3921 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3922 if ((p_frame_number != NULL) &&
3923 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003924 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003925 }
3926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003927}
3928
3929/*===========================================================================
3930 * FUNCTION : handleInputBufferWithLock
3931 *
3932 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3933 *
3934 * PARAMETERS : @frame_number: frame number of the input buffer
3935 *
3936 * RETURN :
3937 *
3938 *==========================================================================*/
3939void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3940{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003941 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003942 pendingRequestIterator i = mPendingRequestsList.begin();
3943 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3944 i++;
3945 }
3946 if (i != mPendingRequestsList.end() && i->input_buffer) {
3947 //found the right request
3948 if (!i->shutter_notified) {
3949 CameraMetadata settings;
3950 camera3_notify_msg_t notify_msg;
3951 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3952 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3953 if(i->settings) {
3954 settings = i->settings;
3955 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3956 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3957 } else {
3958 LOGE("No timestamp in input settings! Using current one.");
3959 }
3960 } else {
3961 LOGE("Input settings missing!");
3962 }
3963
3964 notify_msg.type = CAMERA3_MSG_SHUTTER;
3965 notify_msg.message.shutter.frame_number = frame_number;
3966 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003967 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003968 i->shutter_notified = true;
3969 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3970 i->frame_number, notify_msg.message.shutter.timestamp);
3971 }
3972
3973 if (i->input_buffer->release_fence != -1) {
3974 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3975 close(i->input_buffer->release_fence);
3976 if (rc != OK) {
3977 LOGE("input buffer sync wait failed %d", rc);
3978 }
3979 }
3980
3981 camera3_capture_result result;
3982 memset(&result, 0, sizeof(camera3_capture_result));
3983 result.frame_number = frame_number;
3984 result.result = i->settings;
3985 result.input_buffer = i->input_buffer;
3986 result.partial_result = PARTIAL_RESULT_COUNT;
3987
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003988 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003989 LOGD("Input request metadata and input buffer frame_number = %u",
3990 i->frame_number);
3991 i = erasePendingRequest(i);
3992 } else {
3993 LOGE("Could not find input request for frame number %d", frame_number);
3994 }
3995}
3996
3997/*===========================================================================
3998 * FUNCTION : handleBufferWithLock
3999 *
4000 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4001 *
4002 * PARAMETERS : @buffer: image buffer for the callback
4003 * @frame_number: frame number of the image buffer
4004 *
4005 * RETURN :
4006 *
4007 *==========================================================================*/
4008void QCamera3HardwareInterface::handleBufferWithLock(
4009 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4010{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004011 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004012
4013 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4014 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4015 }
4016
Thierry Strudel3d639192016-09-09 11:52:26 -07004017 /* Nothing to be done during error state */
4018 if ((ERROR == mState) || (DEINIT == mState)) {
4019 return;
4020 }
4021 if (mFlushPerf) {
4022 handleBuffersDuringFlushLock(buffer);
4023 return;
4024 }
4025 //not in flush
4026 // If the frame number doesn't exist in the pending request list,
4027 // directly send the buffer to the frameworks, and update pending buffers map
4028 // Otherwise, book-keep the buffer.
4029 pendingRequestIterator i = mPendingRequestsList.begin();
4030 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4031 i++;
4032 }
4033 if (i == mPendingRequestsList.end()) {
4034 // Verify all pending requests frame_numbers are greater
4035 for (pendingRequestIterator j = mPendingRequestsList.begin();
4036 j != mPendingRequestsList.end(); j++) {
4037 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4038 LOGW("Error: pending live frame number %d is smaller than %d",
4039 j->frame_number, frame_number);
4040 }
4041 }
4042 camera3_capture_result_t result;
4043 memset(&result, 0, sizeof(camera3_capture_result_t));
4044 result.result = NULL;
4045 result.frame_number = frame_number;
4046 result.num_output_buffers = 1;
4047 result.partial_result = 0;
4048 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4049 m != mPendingFrameDropList.end(); m++) {
4050 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4051 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4052 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4053 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4054 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4055 frame_number, streamID);
4056 m = mPendingFrameDropList.erase(m);
4057 break;
4058 }
4059 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004060 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004061 result.output_buffers = buffer;
4062 LOGH("result frame_number = %d, buffer = %p",
4063 frame_number, buffer->buffer);
4064
4065 mPendingBuffersMap.removeBuf(buffer->buffer);
4066
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004067 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004068 } else {
4069 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004070 if (i->input_buffer->release_fence != -1) {
4071 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4072 close(i->input_buffer->release_fence);
4073 if (rc != OK) {
4074 LOGE("input buffer sync wait failed %d", rc);
4075 }
4076 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004077 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004078
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004079 // Put buffer into the pending request
4080 for (auto &requestedBuffer : i->buffers) {
4081 if (requestedBuffer.stream == buffer->stream) {
4082 if (requestedBuffer.buffer != nullptr) {
4083 LOGE("Error: buffer is already set");
4084 } else {
4085 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4086 sizeof(camera3_stream_buffer_t));
4087 *(requestedBuffer.buffer) = *buffer;
4088 LOGH("cache buffer %p at result frame_number %u",
4089 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004090 }
4091 }
4092 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004093
4094 if (i->input_buffer) {
4095 // For a reprocessing request, try to send out shutter callback and result metadata.
4096 handlePendingResultsWithLock(frame_number, nullptr);
4097 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004098 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004099
4100 if (mPreviewStarted == false) {
4101 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4102 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004103 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4104
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004105 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4106 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4107 mPreviewStarted = true;
4108
4109 // Set power hint for preview
4110 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4111 }
4112 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004113}
4114
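/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Sends out shutter callbacks and capture results, in frame
 *              number order, for pending requests that become ready once the
 *              result metadata for frameNumber is available. Also notifies
 *              ERROR_RESULT for earlier live requests that still have no
 *              result metadata. Must be called with mMutex held.
 *
 * PARAMETERS : @frameNumber   : frame number of the request whose result
 *                               metadata is now available
 *              @resultMetadata: result metadata for the request; nullptr for
 *                               a reprocessing request, whose result is its
 *                               settings
 *
 * RETURN     :
 *
 *==========================================================================*/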
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004115void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4116 const camera_metadata_t *resultMetadata)
4117{
4118 // Find the pending request for this result metadata.
4119 auto requestIter = mPendingRequestsList.begin();
4120 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4121 requestIter++;
4122 }
4123
4124 if (requestIter == mPendingRequestsList.end()) {
4125 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4126 return;
4127 }
4128
4129 // Update the result metadata
4130 requestIter->resultMetadata = resultMetadata;
4131
4132 // Check what type of request this is.
4133 bool liveRequest = false;
4134 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004135 // HDR+ request doesn't have partial results.
4136 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004137 } else if (requestIter->input_buffer != nullptr) {
4138 // Reprocessing request result is the same as settings.
4139 requestIter->resultMetadata = requestIter->settings;
4140 // Reprocessing request doesn't have partial results.
4141 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4142 } else {
4143 liveRequest = true;
4144 requestIter->partial_result_cnt++;
4145 mPendingLiveRequest--;
4146
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004147 {
4148 Mutex::Autolock l(gHdrPlusClientLock);
4149 // For a live request, send the metadata to HDR+ client.
4150 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4151 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4152 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4153 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004154 }
4155 }
4156
4157 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4158 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4159 bool readyToSend = true;
4160
4161 // Iterate through the pending requests to send out shutter callbacks and results that are
4162 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4163 // live requests that don't have result metadata yet.
4164 auto iter = mPendingRequestsList.begin();
4165 while (iter != mPendingRequestsList.end()) {
4166 // Check if current pending request is ready. If it's not ready, the following pending
4167 // requests are also not ready.
4168 if (readyToSend && iter->resultMetadata == nullptr) {
4169 readyToSend = false;
4170 }
4171
4172 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4173
4174 std::vector<camera3_stream_buffer_t> outputBuffers;
4175
4176 camera3_capture_result_t result = {};
4177 result.frame_number = iter->frame_number;
4178 result.result = iter->resultMetadata;
4179 result.partial_result = iter->partial_result_cnt;
4180
4181 // If this pending buffer has result metadata, we may be able to send out shutter callback
4182 // and result metadata.
4183 if (iter->resultMetadata != nullptr) {
4184 if (!readyToSend) {
4185 // If any of the previous pending request is not ready, this pending request is
4186 // also not ready to send in order to keep shutter callbacks and result metadata
4187 // in order.
4188 iter++;
4189 continue;
4190 }
4191
4192 // Invoke shutter callback if not yet.
4193 if (!iter->shutter_notified) {
4194 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4195
4196 // Find the timestamp in HDR+ result metadata
4197 camera_metadata_ro_entry_t entry;
4198 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4199 ANDROID_SENSOR_TIMESTAMP, &entry);
4200 if (res != OK) {
4201 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4202 __FUNCTION__, iter->frame_number, strerror(-res), res);
4203 } else {
4204 timestamp = entry.data.i64[0];
4205 }
4206
4207 camera3_notify_msg_t notify_msg = {};
4208 notify_msg.type = CAMERA3_MSG_SHUTTER;
4209 notify_msg.message.shutter.frame_number = iter->frame_number;
4210 notify_msg.message.shutter.timestamp = timestamp;
4211 orchestrateNotify(&notify_msg);
4212 iter->shutter_notified = true;
4213 }
4214
4215 result.input_buffer = iter->input_buffer;
4216
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004217 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4218 // If the result metadata belongs to a live request, notify errors for previous pending
4219 // live requests.
4220 mPendingLiveRequest--;
4221
4222 CameraMetadata dummyMetadata;
4223 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4224 result.result = dummyMetadata.release();
4225
4226 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004227
4228 // partial_result should be PARTIAL_RESULT_CNT in case of
4229 // ERROR_RESULT.
4230 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4231 result.partial_result = PARTIAL_RESULT_COUNT;
4232
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004233 } else {
4234 iter++;
4235 continue;
4236 }
4237
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004238 // Prepare output buffer array
4239 for (auto bufferInfoIter = iter->buffers.begin();
4240 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4241 if (bufferInfoIter->buffer != nullptr) {
4242
4243 QCamera3Channel *channel =
4244 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4245 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4246
4247 // Check if this buffer is a dropped frame.
4248 auto frameDropIter = mPendingFrameDropList.begin();
4249 while (frameDropIter != mPendingFrameDropList.end()) {
4250 if((frameDropIter->stream_ID == streamID) &&
4251 (frameDropIter->frame_number == frameNumber)) {
4252 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4253 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4254 streamID);
4255 mPendingFrameDropList.erase(frameDropIter);
4256 break;
4257 } else {
4258 frameDropIter++;
4259 }
4260 }
4261
4262 // Check buffer error status
4263 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4264 bufferInfoIter->buffer->buffer);
4265 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4266
4267 outputBuffers.push_back(*(bufferInfoIter->buffer));
4268 free(bufferInfoIter->buffer);
4269 bufferInfoIter->buffer = NULL;
4270 }
4271 }
4272
4273 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4274 result.num_output_buffers = outputBuffers.size();
4275
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004276 orchestrateResult(&result);
4277
4278 // For reprocessing, result metadata is the same as settings so do not free it here to
4279 // avoid double free.
4280 if (result.result != iter->settings) {
4281 free_camera_metadata((camera_metadata_t *)result.result);
4282 }
4283 iter->resultMetadata = nullptr;
4284 iter = erasePendingRequest(iter);
4285 }
4286
4287 if (liveRequest) {
4288 for (auto &iter : mPendingRequestsList) {
4289 // Increment pipeline depth for the following pending requests.
4290 if (iter.frame_number > frameNumber) {
4291 iter.pipeline_depth++;
4292 }
4293 }
4294 }
4295
4296 unblockRequestIfNecessary();
4297}
4298
Thierry Strudel3d639192016-09-09 11:52:26 -07004299/*===========================================================================
4300 * FUNCTION : unblockRequestIfNecessary
4301 *
4302 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4303 * that mMutex is held when this function is called.
4304 *
4305 * PARAMETERS :
4306 *
4307 * RETURN :
4308 *
4309 *==========================================================================*/
4310void QCamera3HardwareInterface::unblockRequestIfNecessary()
4311{
4312 // Unblock process_capture_request
4313 pthread_cond_signal(&mRequestCond);
4314}
4315
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004316/*===========================================================================
4317 * FUNCTION : isHdrSnapshotRequest
4318 *
 4319 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4320 *
4321 * PARAMETERS : camera3 request structure
4322 *
4323 * RETURN : boolean decision variable
4324 *
4325 *==========================================================================*/
4326bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4327{
4328 if (request == NULL) {
4329 LOGE("Invalid request handle");
4330 assert(0);
4331 return false;
4332 }
4333
4334 if (!mForceHdrSnapshot) {
4335 CameraMetadata frame_settings;
4336 frame_settings = request->settings;
4337
4338 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4339 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4340 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4341 return false;
4342 }
4343 } else {
4344 return false;
4345 }
4346
4347 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4348 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4349 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4350 return false;
4351 }
4352 } else {
4353 return false;
4354 }
4355 }
4356
4357 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4358 if (request->output_buffers[i].stream->format
4359 == HAL_PIXEL_FORMAT_BLOB) {
4360 return true;
4361 }
4362 }
4363
4364 return false;
4365}
4366/*===========================================================================
4367 * FUNCTION : orchestrateRequest
4368 *
4369 * DESCRIPTION: Orchestrates a capture request from camera service
4370 *
4371 * PARAMETERS :
4372 * @request : request from framework to process
4373 *
4374 * RETURN : Error status codes
4375 *
4376 *==========================================================================*/
4377int32_t QCamera3HardwareInterface::orchestrateRequest(
4378 camera3_capture_request_t *request)
4379{
4380
4381 uint32_t originalFrameNumber = request->frame_number;
4382 uint32_t originalOutputCount = request->num_output_buffers;
4383 const camera_metadata_t *original_settings = request->settings;
4384 List<InternalRequest> internallyRequestedStreams;
4385 List<InternalRequest> emptyInternalList;
4386
4387 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4388 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4389 uint32_t internalFrameNumber;
4390 CameraMetadata modified_meta;
4391
4392
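        // Overview of the sequence issued below: the single framework request
        // is expanded into a series of internal requests with AE locked while
        // exposure compensation is stepped through the HDR bracket
        // (GB_HDR_HALF_STEP_EV, 0, GB_HDR_2X_STEP_EV). Each exposure step is
        // preceded by a metering-only request so AE can settle; the original
        // framework output buffers are attached to the capture following the
        // first settling request, and the original settings pointer is
        // restored once the bracket completes.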
4393 /* Add Blob channel to list of internally requested streams */
4394 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4395 if (request->output_buffers[i].stream->format
4396 == HAL_PIXEL_FORMAT_BLOB) {
4397 InternalRequest streamRequested;
4398 streamRequested.meteringOnly = 1;
4399 streamRequested.need_metadata = 0;
4400 streamRequested.stream = request->output_buffers[i].stream;
4401 internallyRequestedStreams.push_back(streamRequested);
4402 }
4403 }
4404 request->num_output_buffers = 0;
4405 auto itr = internallyRequestedStreams.begin();
4406
4407 /* Modify setting to set compensation */
4408 modified_meta = request->settings;
4409 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4410 uint8_t aeLock = 1;
4411 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4412 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4413 camera_metadata_t *modified_settings = modified_meta.release();
4414 request->settings = modified_settings;
4415
4416 /* Capture Settling & -2x frame */
4417 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4418 request->frame_number = internalFrameNumber;
4419 processCaptureRequest(request, internallyRequestedStreams);
4420
4421 request->num_output_buffers = originalOutputCount;
4422 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4423 request->frame_number = internalFrameNumber;
4424 processCaptureRequest(request, emptyInternalList);
4425 request->num_output_buffers = 0;
4426
4427 modified_meta = modified_settings;
4428 expCompensation = 0;
4429 aeLock = 1;
4430 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4431 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4432 modified_settings = modified_meta.release();
4433 request->settings = modified_settings;
4434
4435 /* Capture Settling & 0X frame */
4436
4437 itr = internallyRequestedStreams.begin();
4438 if (itr == internallyRequestedStreams.end()) {
4439 LOGE("Error Internally Requested Stream list is empty");
4440 assert(0);
4441 } else {
4442 itr->need_metadata = 0;
4443 itr->meteringOnly = 1;
4444 }
4445
4446 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4447 request->frame_number = internalFrameNumber;
4448 processCaptureRequest(request, internallyRequestedStreams);
4449
4450 itr = internallyRequestedStreams.begin();
4451 if (itr == internallyRequestedStreams.end()) {
4452 ALOGE("Error Internally Requested Stream list is empty");
4453 assert(0);
4454 } else {
4455 itr->need_metadata = 1;
4456 itr->meteringOnly = 0;
4457 }
4458
4459 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4460 request->frame_number = internalFrameNumber;
4461 processCaptureRequest(request, internallyRequestedStreams);
4462
4463 /* Capture 2X frame*/
4464 modified_meta = modified_settings;
4465 expCompensation = GB_HDR_2X_STEP_EV;
4466 aeLock = 1;
4467 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4468 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4469 modified_settings = modified_meta.release();
4470 request->settings = modified_settings;
4471
4472 itr = internallyRequestedStreams.begin();
4473 if (itr == internallyRequestedStreams.end()) {
4474 ALOGE("Error Internally Requested Stream list is empty");
4475 assert(0);
4476 } else {
4477 itr->need_metadata = 0;
4478 itr->meteringOnly = 1;
4479 }
4480 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4481 request->frame_number = internalFrameNumber;
4482 processCaptureRequest(request, internallyRequestedStreams);
4483
4484 itr = internallyRequestedStreams.begin();
4485 if (itr == internallyRequestedStreams.end()) {
4486 ALOGE("Error Internally Requested Stream list is empty");
4487 assert(0);
4488 } else {
4489 itr->need_metadata = 1;
4490 itr->meteringOnly = 0;
4491 }
4492
4493 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4494 request->frame_number = internalFrameNumber;
4495 processCaptureRequest(request, internallyRequestedStreams);
4496
4497
4498 /* Capture 2X on original streaming config*/
4499 internallyRequestedStreams.clear();
4500
4501 /* Restore original settings pointer */
4502 request->settings = original_settings;
4503 } else {
4504 uint32_t internalFrameNumber;
4505 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4506 request->frame_number = internalFrameNumber;
4507 return processCaptureRequest(request, internallyRequestedStreams);
4508 }
4509
4510 return NO_ERROR;
4511}
4512
4513/*===========================================================================
4514 * FUNCTION : orchestrateResult
4515 *
4516 * DESCRIPTION: Orchestrates a capture result to camera service
4517 *
4518 * PARAMETERS :
 4519 *   @result    : capture result to be sent to camera service
4520 *
4521 * RETURN :
4522 *
4523 *==========================================================================*/
4524void QCamera3HardwareInterface::orchestrateResult(
4525 camera3_capture_result_t *result)
4526{
4527 uint32_t frameworkFrameNumber;
4528 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4529 frameworkFrameNumber);
4530 if (rc != NO_ERROR) {
4531 LOGE("Cannot find translated frameworkFrameNumber");
4532 assert(0);
4533 } else {
4534 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004535 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004536 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004537 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004538 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4539 camera_metadata_entry_t entry;
4540 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4541 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004542 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004543 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4544 if (ret != OK)
4545 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004546 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004547 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004548 result->frame_number = frameworkFrameNumber;
4549 mCallbackOps->process_capture_result(mCallbackOps, result);
4550 }
4551 }
4552}
4553
4554/*===========================================================================
4555 * FUNCTION : orchestrateNotify
4556 *
4557 * DESCRIPTION: Orchestrates a notify to camera service
4558 *
4559 * PARAMETERS :
 4560 *   @notify_msg : notify message to be sent to camera service
4561 *
4562 * RETURN :
4563 *
4564 *==========================================================================*/
4565void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4566{
4567 uint32_t frameworkFrameNumber;
4568 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004569 int32_t rc = NO_ERROR;
4570
4571 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004572 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004573
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004574 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004575 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4576 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4577 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004578 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004579 LOGE("Cannot find translated frameworkFrameNumber");
4580 assert(0);
4581 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004582 }
4583 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004584
4585 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4586 LOGD("Internal Request drop the notifyCb");
4587 } else {
4588 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4589 mCallbackOps->notify(mCallbackOps, notify_msg);
4590 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004591}
4592
4593/*===========================================================================
4594 * FUNCTION : FrameNumberRegistry
4595 *
4596 * DESCRIPTION: Constructor
4597 *
4598 * PARAMETERS :
4599 *
4600 * RETURN :
4601 *
4602 *==========================================================================*/
4603FrameNumberRegistry::FrameNumberRegistry()
4604{
4605 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4606}
4607
4608/*===========================================================================
4609 * FUNCTION : ~FrameNumberRegistry
4610 *
4611 * DESCRIPTION: Destructor
4612 *
4613 * PARAMETERS :
4614 *
4615 * RETURN :
4616 *
4617 *==========================================================================*/
4618FrameNumberRegistry::~FrameNumberRegistry()
4619{
4620}
4621
4622/*===========================================================================
 4623 * FUNCTION   : purgeOldEntriesLocked
 4624 *
 4625 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4626 *
4627 * PARAMETERS :
4628 *
4629 * RETURN : NONE
4630 *
4631 *==========================================================================*/
4632void FrameNumberRegistry::purgeOldEntriesLocked()
4633{
4634 while (_register.begin() != _register.end()) {
4635 auto itr = _register.begin();
4636 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4637 _register.erase(itr);
4638 } else {
4639 return;
4640 }
4641 }
4642}
4643
4644/*===========================================================================
4645 * FUNCTION : allocStoreInternalFrameNumber
4646 *
4647 * DESCRIPTION: Method to note down a framework request and associate a new
4648 * internal request number against it
4649 *
4650 * PARAMETERS :
4651 * @fFrameNumber: Identifier given by framework
4652 * @internalFN : Output parameter which will have the newly generated internal
 4653 *                frame number in the registry
4654 *
4655 * RETURN : Error code
4656 *
4657 *==========================================================================*/
4658int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4659 uint32_t &internalFrameNumber)
4660{
4661 Mutex::Autolock lock(mRegistryLock);
4662 internalFrameNumber = _nextFreeInternalNumber++;
4663 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4664 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4665 purgeOldEntriesLocked();
4666 return NO_ERROR;
4667}
4668
4669/*===========================================================================
4670 * FUNCTION : generateStoreInternalFrameNumber
4671 *
4672 * DESCRIPTION: Method to associate a new internal request number independent
 4673 *              of any association with framework requests
4674 *
4675 * PARAMETERS :
4676 * @internalFrame#: Output parameter which will have the newly generated internal
 4677 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4678 *
4679 * RETURN : Error code
4680 *
4681 *==========================================================================*/
4682int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4683{
4684 Mutex::Autolock lock(mRegistryLock);
4685 internalFrameNumber = _nextFreeInternalNumber++;
4686 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4687 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4688 purgeOldEntriesLocked();
4689 return NO_ERROR;
4690}
4691
4692/*===========================================================================
4693 * FUNCTION : getFrameworkFrameNumber
4694 *
4695 * DESCRIPTION: Method to query the framework framenumber given an internal #
4696 *
4697 * PARAMETERS :
4698 * @internalFrame#: Internal reference
4699 * @frameworkframenumber: Output parameter holding framework frame entry
4700 *
4701 * RETURN : Error code
4702 *
4703 *==========================================================================*/
4704int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4705 uint32_t &frameworkFrameNumber)
4706{
4707 Mutex::Autolock lock(mRegistryLock);
4708 auto itr = _register.find(internalFrameNumber);
4709 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004710 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004711 return -ENOENT;
4712 }
4713
4714 frameworkFrameNumber = itr->second;
4715 purgeOldEntriesLocked();
4716 return NO_ERROR;
4717}
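
// Illustrative usage sketch (not part of the HAL code) of how FrameNumberRegistry
// is exercised by the orchestration path above; the frame numbers are
// hypothetical.
//
//     FrameNumberRegistry registry;
//     uint32_t internalFrameNumber;
//     // A framework request (frame number 10) gets an internal alias.
//     registry.allocStoreInternalFrameNumber(10, internalFrameNumber);
//
//     // HAL-internal requests get numbers with no framework counterpart; they
//     // map to EMPTY_FRAMEWORK_FRAME_NUMBER and their results/notifies are
//     // dropped by orchestrateResult()/orchestrateNotify().
//     uint32_t internalOnly;
//     registry.generateStoreInternalFrameNumber(internalOnly);
//
//     // A result tagged with an internal number is translated back to the
//     // framework number before process_capture_result is invoked.
//     uint32_t frameworkFrameNumber;
//     registry.getFrameworkFrameNumber(internalFrameNumber, frameworkFrameNumber);
//     // frameworkFrameNumber == 10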
Thierry Strudel3d639192016-09-09 11:52:26 -07004718
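/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 *              info of the given channel: stream id, format, dimensions,
 *              per-plane stride/scanline, and padding derived from the frame
 *              length.
 *
 * PARAMETERS : @config        : stream configuration to be filled
 *              @pbStreamId    : pbcamera stream id to assign
 *              @pbStreamFormat: pbcamera stream format to assign
 *              @channel       : channel owning the stream
 *              @streamIndex   : index of the stream within the channel
 *
 * RETURN     : Error status code
 *
 *==========================================================================*/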
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004719status_t QCamera3HardwareInterface::fillPbStreamConfig(
4720 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4721 QCamera3Channel *channel, uint32_t streamIndex) {
4722 if (config == nullptr) {
4723 LOGE("%s: config is null", __FUNCTION__);
4724 return BAD_VALUE;
4725 }
4726
4727 if (channel == nullptr) {
4728 LOGE("%s: channel is null", __FUNCTION__);
4729 return BAD_VALUE;
4730 }
4731
4732 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4733 if (stream == nullptr) {
4734 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4735 return NAME_NOT_FOUND;
4736 }
4737
4738 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4739 if (streamInfo == nullptr) {
4740 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4741 return NAME_NOT_FOUND;
4742 }
4743
4744 config->id = pbStreamId;
4745 config->image.width = streamInfo->dim.width;
4746 config->image.height = streamInfo->dim.height;
4747 config->image.padding = 0;
4748 config->image.format = pbStreamFormat;
4749
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004750 uint32_t totalPlaneSize = 0;
4751
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004752 // Fill plane information.
4753 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4754 pbcamera::PlaneConfiguration plane;
4755 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4756 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4757 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004758
4759 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004760 }
4761
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004762 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004763 return OK;
4764}
4765
Thierry Strudel3d639192016-09-09 11:52:26 -07004766/*===========================================================================
4767 * FUNCTION : processCaptureRequest
4768 *
4769 * DESCRIPTION: process a capture request from camera service
4770 *
4771 * PARAMETERS :
4772 * @request : request from framework to process
4773 *
4774 * RETURN :
4775 *
4776 *==========================================================================*/
4777int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004778 camera3_capture_request_t *request,
4779 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004780{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004781 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004782 int rc = NO_ERROR;
4783 int32_t request_id;
4784 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 bool isVidBufRequested = false;
4786 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004787 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004788
4789 pthread_mutex_lock(&mMutex);
4790
4791 // Validate current state
4792 switch (mState) {
4793 case CONFIGURED:
4794 case STARTED:
4795 /* valid state */
4796 break;
4797
4798 case ERROR:
4799 pthread_mutex_unlock(&mMutex);
4800 handleCameraDeviceError();
4801 return -ENODEV;
4802
4803 default:
4804 LOGE("Invalid state %d", mState);
4805 pthread_mutex_unlock(&mMutex);
4806 return -ENODEV;
4807 }
4808
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004809 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004810 if (rc != NO_ERROR) {
4811 LOGE("incoming request is not valid");
4812 pthread_mutex_unlock(&mMutex);
4813 return rc;
4814 }
4815
4816 meta = request->settings;
4817
4818 // For first capture request, send capture intent, and
4819 // stream on all streams
4820 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004821 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 // send an unconfigure to the backend so that the isp
4823 // resources are deallocated
4824 if (!mFirstConfiguration) {
4825 cam_stream_size_info_t stream_config_info;
4826 int32_t hal_version = CAM_HAL_V3;
4827 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4828 stream_config_info.buffer_info.min_buffers =
4829 MIN_INFLIGHT_REQUESTS;
4830 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004831 m_bIs4KVideo ? 0 :
4832 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004833 clear_metadata_buffer(mParameters);
4834 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4835 CAM_INTF_PARM_HAL_VERSION, hal_version);
4836 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4837 CAM_INTF_META_STREAM_INFO, stream_config_info);
4838 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4839 mParameters);
4840 if (rc < 0) {
4841 LOGE("set_parms for unconfigure failed");
4842 pthread_mutex_unlock(&mMutex);
4843 return rc;
4844 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004845
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004847 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004848 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004849 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004850 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 property_get("persist.camera.is_type", is_type_value, "4");
4852 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4853 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4854 property_get("persist.camera.is_type_preview", is_type_value, "4");
4855 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4856 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004857
4858 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4859 int32_t hal_version = CAM_HAL_V3;
4860 uint8_t captureIntent =
4861 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4862 mCaptureIntent = captureIntent;
4863 clear_metadata_buffer(mParameters);
4864 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4866 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004867 if (mFirstConfiguration) {
4868 // configure instant AEC
4869 // Instant AEC is a session based parameter and it is needed only
4870 // once per complete session after open camera.
4871 // i.e. This is set only once for the first capture request, after open camera.
4872 setInstantAEC(meta);
4873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874 uint8_t fwkVideoStabMode=0;
4875 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4876 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4877 }
4878
Xue Tuecac74e2017-04-17 13:58:15 -07004879        // Even if the EIS setprop is enabled, turn EIS on only for video/preview
4880 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 int32_t vsMode;
4883 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4885 rc = BAD_VALUE;
4886 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 LOGD("setEis %d", setEis);
4888 bool eis3Supported = false;
4889 size_t count = IS_TYPE_MAX;
4890 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4891 for (size_t i = 0; i < count; i++) {
4892 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4893 eis3Supported = true;
4894 break;
4895 }
4896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004897
4898 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004899 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4901 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004902 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4903 is_type = isTypePreview;
4904 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4905 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4906 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004907 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004908 } else {
4909 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004911 } else {
4912 is_type = IS_TYPE_NONE;
4913 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004916 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4917 }
4918 }
4919
4920 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4921 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4922
Thierry Strudel54dc9782017-02-15 12:12:10 -08004923 //Disable tintless only if the property is set to 0
4924 memset(prop, 0, sizeof(prop));
4925 property_get("persist.camera.tintless.enable", prop, "1");
4926 int32_t tintless_value = atoi(prop);
4927
Thierry Strudel3d639192016-09-09 11:52:26 -07004928 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4929 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004930
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 //Disable CDS for HFR mode or if DIS/EIS is on.
4932 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4933 //after every configure_stream
4934 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4935 (m_bIsVideo)) {
4936 int32_t cds = CAM_CDS_MODE_OFF;
4937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4938 CAM_INTF_PARM_CDS_MODE, cds))
4939 LOGE("Failed to disable CDS for HFR mode");
4940
4941 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942
4943 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4944 uint8_t* use_av_timer = NULL;
4945
4946 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004947 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004948 use_av_timer = &m_debug_avtimer;
4949 }
4950 else{
4951 use_av_timer =
4952 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004953 if (use_av_timer) {
4954 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4955 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004956 }
4957
4958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4959 rc = BAD_VALUE;
4960 }
4961 }
4962
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 setMobicat();
4964
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004965 uint8_t nrMode = 0;
4966 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4967 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4968 }
4969
Thierry Strudel3d639192016-09-09 11:52:26 -07004970 /* Set fps and hfr mode while sending meta stream info so that sensor
4971 * can configure appropriate streaming mode */
4972 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004973 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4974 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004975 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4976 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 if (rc == NO_ERROR) {
4978 int32_t max_fps =
4979 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004980 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4982 }
4983 /* For HFR, more buffers are dequeued upfront to improve the performance */
4984 if (mBatchSize) {
4985 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4986 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4987 }
4988 }
4989 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004990 LOGE("setHalFpsRange failed");
4991 }
4992 }
4993 if (meta.exists(ANDROID_CONTROL_MODE)) {
4994 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4995 rc = extractSceneMode(meta, metaMode, mParameters);
4996 if (rc != NO_ERROR) {
4997 LOGE("extractSceneMode failed");
4998 }
4999 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005000 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005001
Thierry Strudel04e026f2016-10-10 11:27:36 -07005002 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5003 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5004 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5005 rc = setVideoHdrMode(mParameters, vhdr);
5006 if (rc != NO_ERROR) {
5007 LOGE("setVideoHDR is failed");
5008 }
5009 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005010
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005011 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005012 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005013 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005014 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5015 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5016 sensorModeFullFov)) {
5017 rc = BAD_VALUE;
5018 }
5019 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005020 //TODO: validate the arguments, HSV scenemode should have only the
5021 //advertised fps ranges
5022
5023 /*set the capture intent, hal version, tintless, stream info,
 5024     *and DIS enable parameters to the backend*/
5025 LOGD("set_parms META_STREAM_INFO " );
5026 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005027 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5028 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 mStreamConfigInfo.type[i],
5030 mStreamConfigInfo.stream_sizes[i].width,
5031 mStreamConfigInfo.stream_sizes[i].height,
5032 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005033 mStreamConfigInfo.format[i],
5034 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005035 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5038 mParameters);
5039 if (rc < 0) {
5040 LOGE("set_parms failed for hal version, stream info");
5041 }
5042
Chien-Yu Chenee335912017-02-09 17:53:20 -08005043 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5044 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005045 if (rc != NO_ERROR) {
5046 LOGE("Failed to get sensor output size");
5047 pthread_mutex_unlock(&mMutex);
5048 goto error_exit;
5049 }
5050
5051 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5052 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005053 mSensorModeInfo.active_array_size.width,
5054 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005055
5056 /* Set batchmode before initializing channel. Since registerBuffer
5057 * internally initializes some of the channels, better set batchmode
5058 * even before first register buffer */
5059 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5060 it != mStreamInfo.end(); it++) {
5061 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5062 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5063 && mBatchSize) {
5064 rc = channel->setBatchSize(mBatchSize);
5065 //Disable per frame map unmap for HFR/batchmode case
5066 rc |= channel->setPerFrameMapUnmap(false);
5067 if (NO_ERROR != rc) {
5068 LOGE("Channel init failed %d", rc);
5069 pthread_mutex_unlock(&mMutex);
5070 goto error_exit;
5071 }
5072 }
5073 }
5074
5075 //First initialize all streams
5076 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5077 it != mStreamInfo.end(); it++) {
5078 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005079
5080 /* Initial value of NR mode is needed before stream on */
5081 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5083 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005084 setEis) {
5085 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5086 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5087 is_type = mStreamConfigInfo.is_type[i];
5088 break;
5089 }
5090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005091 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005092 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005093 rc = channel->initialize(IS_TYPE_NONE);
5094 }
5095 if (NO_ERROR != rc) {
5096 LOGE("Channel initialization failed %d", rc);
5097 pthread_mutex_unlock(&mMutex);
5098 goto error_exit;
5099 }
5100 }
5101
5102 if (mRawDumpChannel) {
5103 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5104 if (rc != NO_ERROR) {
5105 LOGE("Error: Raw Dump Channel init failed");
5106 pthread_mutex_unlock(&mMutex);
5107 goto error_exit;
5108 }
5109 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005110 if (mHdrPlusRawSrcChannel) {
5111 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5112 if (rc != NO_ERROR) {
5113 LOGE("Error: HDR+ RAW Source Channel init failed");
5114 pthread_mutex_unlock(&mMutex);
5115 goto error_exit;
5116 }
5117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 if (mSupportChannel) {
5119 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5120 if (rc < 0) {
5121 LOGE("Support channel initialization failed");
5122 pthread_mutex_unlock(&mMutex);
5123 goto error_exit;
5124 }
5125 }
5126 if (mAnalysisChannel) {
5127 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5128 if (rc < 0) {
5129 LOGE("Analysis channel initialization failed");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133 }
5134 if (mDummyBatchChannel) {
5135 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5136 if (rc < 0) {
5137 LOGE("mDummyBatchChannel setBatchSize failed");
5138 pthread_mutex_unlock(&mMutex);
5139 goto error_exit;
5140 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005141 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 if (rc < 0) {
5143 LOGE("mDummyBatchChannel initialization failed");
5144 pthread_mutex_unlock(&mMutex);
5145 goto error_exit;
5146 }
5147 }
5148
5149 // Set bundle info
5150 rc = setBundleInfo();
5151 if (rc < 0) {
5152 LOGE("setBundleInfo failed %d", rc);
5153 pthread_mutex_unlock(&mMutex);
5154 goto error_exit;
5155 }
5156
5157 //update settings from app here
5158 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5159 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5160 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5161 }
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5163 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5164 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5165 }
5166 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5167 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5168 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5169
5170 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5171 (mLinkedCameraId != mCameraId) ) {
5172 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5173 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005174 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005175 goto error_exit;
5176 }
5177 }
5178
5179 // add bundle related cameras
5180 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5181 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005182 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5183 &m_pDualCamCmdPtr->bundle_info;
5184 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 if (mIsDeviceLinked)
5186 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5187 else
5188 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5189
5190 pthread_mutex_lock(&gCamLock);
5191
5192 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5193 LOGE("Dualcam: Invalid Session Id ");
5194 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005195 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 goto error_exit;
5197 }
5198
5199 if (mIsMainCamera == 1) {
5200 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5201 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005202 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005203 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 // related session id should be session id of linked session
5205 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5206 } else {
5207 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5208 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005209 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005210 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005211 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5212 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005213 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 pthread_mutex_unlock(&gCamLock);
5215
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005216 rc = mCameraHandle->ops->set_dual_cam_cmd(
5217 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 if (rc < 0) {
5219 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005220 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 goto error_exit;
5222 }
5223 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 goto no_error;
5225error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005226 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 return rc;
5228no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005229 mWokenUpByDaemon = false;
5230 mPendingLiveRequest = 0;
5231 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 }
5233
5234 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005235 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236
5237 if (mFlushPerf) {
5238 //we cannot accept any requests during flush
5239 LOGE("process_capture_request cannot proceed during flush");
5240 pthread_mutex_unlock(&mMutex);
5241 return NO_ERROR; //should return an error
5242 }
5243
5244 if (meta.exists(ANDROID_REQUEST_ID)) {
5245 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5246 mCurrentRequestId = request_id;
5247 LOGD("Received request with id: %d", request_id);
5248 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5249 LOGE("Unable to find request id field, \
5250 & no previous id available");
5251 pthread_mutex_unlock(&mMutex);
5252 return NAME_NOT_FOUND;
5253 } else {
5254 LOGD("Re-using old request id");
5255 request_id = mCurrentRequestId;
5256 }
5257
5258 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5259 request->num_output_buffers,
5260 request->input_buffer,
5261 frameNumber);
5262 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005263 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005264 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005265 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005266 uint32_t snapshotStreamId = 0;
5267 for (size_t i = 0; i < request->num_output_buffers; i++) {
5268 const camera3_stream_buffer_t& output = request->output_buffers[i];
5269 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5270
Emilian Peev7650c122017-01-19 08:24:33 -08005271 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5272 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005273 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005274 blob_request = 1;
5275 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5276 }
5277
5278 if (output.acquire_fence != -1) {
5279 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5280 close(output.acquire_fence);
5281 if (rc != OK) {
5282 LOGE("sync wait failed %d", rc);
5283 pthread_mutex_unlock(&mMutex);
5284 return rc;
5285 }
5286 }
5287
Emilian Peev0f3c3162017-03-15 12:57:46 +00005288 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5289 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005290 depthRequestPresent = true;
5291 continue;
5292 }
5293
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005294 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005296
5297 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5298 isVidBufRequested = true;
5299 }
5300 }
5301
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005302 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5303 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5304 itr++) {
5305 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5306 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5307 channel->getStreamID(channel->getStreamTypeMask());
5308
5309 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5310 isVidBufRequested = true;
5311 }
5312 }
5313
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005315 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005316 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005317 }
5318 if (blob_request && mRawDumpChannel) {
5319 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005320 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005321 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005322 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005323 }
5324
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005325 {
5326 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5327 // Request a RAW buffer if
5328 // 1. mHdrPlusRawSrcChannel is valid.
5329 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate.)
5330 // 3. There is no pending HDR+ request.
5331 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5332 mHdrPlusPendingRequests.size() == 0) {
5333 streamsArray.stream_request[streamsArray.num_streams].streamID =
5334 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5335 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5336 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005337 }
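 // Worked example (kHdrPlusRawPeriod's real value is defined elsewhere; 4 is
 // assumed here purely for illustration): with a period of 4, an extra RAW
 // buffer is requested on frames 0, 4, 8, ... but only while
 // mHdrPlusRawSrcChannel is valid and no HDR+ request is still pending in
 // mHdrPlusPendingRequests.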
5338
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005339 //extract capture intent
5340 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5341 mCaptureIntent =
5342 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5343 }
5344
5345 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5346 mCacMode =
5347 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5348 }
5349
5350 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005351 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005353 {
5354 Mutex::Autolock l(gHdrPlusClientLock);
5355 // If this request has a still capture intent, try to submit an HDR+ request.
5356 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5357 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5358 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5359 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005360 }
5361
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005362 if (hdrPlusRequest) {
5363 // For a HDR+ request, just set the frame parameters.
5364 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5365 if (rc < 0) {
5366 LOGE("fail to set frame parameters");
5367 pthread_mutex_unlock(&mMutex);
5368 return rc;
5369 }
5370 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 /* Parse the settings:
5372 * - For every request in NORMAL MODE
5373 * - For every request in HFR mode during preview only case
5374 * - For first request of every batch in HFR mode during video
5375 * recording. In batchmode the same settings except frame number is
5376 * repeated in each request of the batch.
5377 */
5378 if (!mBatchSize ||
5379 (mBatchSize && !isVidBufRequested) ||
5380 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005381 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005382 if (rc < 0) {
5383 LOGE("fail to set frame parameters");
5384 pthread_mutex_unlock(&mMutex);
5385 return rc;
5386 }
5387 }
5388 /* For batch-mode HFR, setFrameParameters is not called for every
5389 * request; only the frame number of the latest request is parsed.
5390 * Keep track of the first and last frame numbers in a batch so that
5391 * metadata for all frame numbers of the batch can be duplicated in
5392 * handleBatchMetadata */
5393 if (mBatchSize) {
5394 if (!mToBeQueuedVidBufs) {
5395 //start of the batch
5396 mFirstFrameNumberInBatch = request->frame_number;
5397 }
5398 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5399 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5400 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005401 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005402 return BAD_VALUE;
5403 }
5404 }
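 // Worked example (an mBatchSize of 4 is assumed purely for illustration):
 // requests 100..103, each asking for a video buffer, form one batch. Frame
 // 100 sets mFirstFrameNumberInBatch and carries the full settings through
 // setFrameParameters above; frames 101..103 only refresh
 // CAM_INTF_META_FRAME_NUMBER here, and handleBatchMetadata later replicates
 // the single metadata callback across frames 100..103.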
5405 if (mNeedSensorRestart) {
5406 /* Unlock the mutex as restartSensor waits on the channels to be
5407 * stopped, which in turn calls stream callback functions -
5408 * handleBufferWithLock and handleMetadataWithLock */
5409 pthread_mutex_unlock(&mMutex);
5410 rc = dynamicUpdateMetaStreamInfo();
5411 if (rc != NO_ERROR) {
5412 LOGE("Restarting the sensor failed");
5413 return BAD_VALUE;
5414 }
5415 mNeedSensorRestart = false;
5416 pthread_mutex_lock(&mMutex);
5417 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005418 if(mResetInstantAEC) {
5419 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5420 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5421 mResetInstantAEC = false;
5422 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005423 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005424 if (request->input_buffer->acquire_fence != -1) {
5425 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5426 close(request->input_buffer->acquire_fence);
5427 if (rc != OK) {
5428 LOGE("input buffer sync wait failed %d", rc);
5429 pthread_mutex_unlock(&mMutex);
5430 return rc;
5431 }
5432 }
5433 }
5434
5435 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5436 mLastCustIntentFrmNum = frameNumber;
5437 }
5438 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005439 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005440 pendingRequestIterator latestRequest;
5441 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005442 pendingRequest.num_buffers = depthRequestPresent ?
5443 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 pendingRequest.request_id = request_id;
5445 pendingRequest.blob_request = blob_request;
5446 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005447 if (request->input_buffer) {
5448 pendingRequest.input_buffer =
5449 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5450 *(pendingRequest.input_buffer) = *(request->input_buffer);
5451 pInputBuffer = pendingRequest.input_buffer;
5452 } else {
5453 pendingRequest.input_buffer = NULL;
5454 pInputBuffer = NULL;
5455 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005456 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005457
5458 pendingRequest.pipeline_depth = 0;
5459 pendingRequest.partial_result_cnt = 0;
5460 extractJpegMetadata(mCurJpegMeta, request);
5461 pendingRequest.jpegMetadata = mCurJpegMeta;
5462 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5463 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005464 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005465 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5466 mHybridAeEnable =
5467 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5468 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005469
5470 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5471 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005472 /* DevCamDebug metadata processCaptureRequest */
5473 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5474 mDevCamDebugMetaEnable =
5475 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5476 }
5477 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5478 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005479
5480 //extract CAC info
5481 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5482 mCacMode =
5483 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5484 }
5485 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005486 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005487
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005488 // extract enableZsl info
5489 if (gExposeEnableZslKey) {
5490 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5491 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5492 mZslEnabled = pendingRequest.enableZsl;
5493 } else {
5494 pendingRequest.enableZsl = mZslEnabled;
5495 }
5496 }
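 // Illustrative example: with gExposeEnableZslKey set, a request carrying
 // ANDROID_CONTROL_ENABLE_ZSL = true turns mZslEnabled on; any following
 // request that omits the key inherits that sticky value until a later request
 // explicitly changes it.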
5497
Thierry Strudel3d639192016-09-09 11:52:26 -07005498 PendingBuffersInRequest bufsForCurRequest;
5499 bufsForCurRequest.frame_number = frameNumber;
5500 // Mark current timestamp for the new request
5501 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005502 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005503
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005504 if (hdrPlusRequest) {
5505 // Save settings for this request.
5506 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5507 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5508
5509 // Add to pending HDR+ request queue.
5510 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5511 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5512
5513 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5514 }
5515
Thierry Strudel3d639192016-09-09 11:52:26 -07005516 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005517 if ((request->output_buffers[i].stream->data_space ==
5518 HAL_DATASPACE_DEPTH) &&
5519 (HAL_PIXEL_FORMAT_BLOB ==
5520 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005521 continue;
5522 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005523 RequestedBufferInfo requestedBuf;
5524 memset(&requestedBuf, 0, sizeof(requestedBuf));
5525 requestedBuf.stream = request->output_buffers[i].stream;
5526 requestedBuf.buffer = NULL;
5527 pendingRequest.buffers.push_back(requestedBuf);
5528
5529 // Add to buffer handle the pending buffers list
5530 PendingBufferInfo bufferInfo;
5531 bufferInfo.buffer = request->output_buffers[i].buffer;
5532 bufferInfo.stream = request->output_buffers[i].stream;
5533 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5534 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5535 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5536 frameNumber, bufferInfo.buffer,
5537 channel->getStreamTypeMask(), bufferInfo.stream->format);
5538 }
5539 // Add this request packet into mPendingBuffersMap
5540 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5541 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5542 mPendingBuffersMap.get_num_overall_buffers());
5543
5544 latestRequest = mPendingRequestsList.insert(
5545 mPendingRequestsList.end(), pendingRequest);
5546 if(mFlush) {
5547 LOGI("mFlush is true");
5548 pthread_mutex_unlock(&mMutex);
5549 return NO_ERROR;
5550 }
5551
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005552 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5553 // channel.
5554 if (!hdrPlusRequest) {
5555 int indexUsed;
5556 // Notify metadata channel we receive a request
5557 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005558
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005559 if(request->input_buffer != NULL){
5560 LOGD("Input request, frame_number %d", frameNumber);
5561 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5562 if (NO_ERROR != rc) {
5563 LOGE("fail to set reproc parameters");
5564 pthread_mutex_unlock(&mMutex);
5565 return rc;
5566 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 }
5568
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005569 // Call request on other streams
5570 uint32_t streams_need_metadata = 0;
5571 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5572 for (size_t i = 0; i < request->num_output_buffers; i++) {
5573 const camera3_stream_buffer_t& output = request->output_buffers[i];
5574 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5575
5576 if (channel == NULL) {
5577 LOGW("invalid channel pointer for stream");
5578 continue;
5579 }
5580
5581 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5582 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5583 output.buffer, request->input_buffer, frameNumber);
5584 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005585 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5587 if (rc < 0) {
5588 LOGE("Fail to request on picture channel");
5589 pthread_mutex_unlock(&mMutex);
5590 return rc;
5591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005592 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005593 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5594 assert(NULL != mDepthChannel);
5595 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005596
Emilian Peev7650c122017-01-19 08:24:33 -08005597 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5598 if (rc < 0) {
5599 LOGE("Fail to map on depth buffer");
5600 pthread_mutex_unlock(&mMutex);
5601 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005602 }
Emilian Peev7650c122017-01-19 08:24:33 -08005603 } else {
5604 LOGD("snapshot request with buffer %p, frame_number %d",
5605 output.buffer, frameNumber);
5606 if (!request->settings) {
5607 rc = channel->request(output.buffer, frameNumber,
5608 NULL, mPrevParameters, indexUsed);
5609 } else {
5610 rc = channel->request(output.buffer, frameNumber,
5611 NULL, mParameters, indexUsed);
5612 }
5613 if (rc < 0) {
5614 LOGE("Fail to request on picture channel");
5615 pthread_mutex_unlock(&mMutex);
5616 return rc;
5617 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618
Emilian Peev7650c122017-01-19 08:24:33 -08005619 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5620 uint32_t j = 0;
5621 for (j = 0; j < streamsArray.num_streams; j++) {
5622 if (streamsArray.stream_request[j].streamID == streamId) {
5623 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5624 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5625 else
5626 streamsArray.stream_request[j].buf_index = indexUsed;
5627 break;
5628 }
5629 }
5630 if (j == streamsArray.num_streams) {
5631 LOGE("Did not find matching stream to update index");
5632 assert(0);
5633 }
5634
5635 pendingBufferIter->need_metadata = true;
5636 streams_need_metadata++;
5637 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005638 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005639 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5640 bool needMetadata = false;
5641 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5642 rc = yuvChannel->request(output.buffer, frameNumber,
5643 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5644 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005645 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005647 pthread_mutex_unlock(&mMutex);
5648 return rc;
5649 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005650
5651 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5652 uint32_t j = 0;
5653 for (j = 0; j < streamsArray.num_streams; j++) {
5654 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005655 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5656 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5657 else
5658 streamsArray.stream_request[j].buf_index = indexUsed;
5659 break;
5660 }
5661 }
5662 if (j == streamsArray.num_streams) {
5663 LOGE("Did not find matching stream to update index");
5664 assert(0);
5665 }
5666
5667 pendingBufferIter->need_metadata = needMetadata;
5668 if (needMetadata)
5669 streams_need_metadata += 1;
5670 LOGD("calling YUV channel request, need_metadata is %d",
5671 needMetadata);
5672 } else {
5673 LOGD("request with buffer %p, frame_number %d",
5674 output.buffer, frameNumber);
5675
5676 rc = channel->request(output.buffer, frameNumber, indexUsed);
5677
5678 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5679 uint32_t j = 0;
5680 for (j = 0; j < streamsArray.num_streams; j++) {
5681 if (streamsArray.stream_request[j].streamID == streamId) {
5682 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5683 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5684 else
5685 streamsArray.stream_request[j].buf_index = indexUsed;
5686 break;
5687 }
5688 }
5689 if (j == streamsArray.num_streams) {
5690 LOGE("Did not find matching stream to update index");
5691 assert(0);
5692 }
5693
5694 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5695 && mBatchSize) {
5696 mToBeQueuedVidBufs++;
5697 if (mToBeQueuedVidBufs == mBatchSize) {
5698 channel->queueBatchBuf();
5699 }
5700 }
5701 if (rc < 0) {
5702 LOGE("request failed");
5703 pthread_mutex_unlock(&mMutex);
5704 return rc;
5705 }
5706 }
5707 pendingBufferIter++;
5708 }
5709
5710 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5711 itr++) {
5712 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5713
5714 if (channel == NULL) {
5715 LOGE("invalid channel pointer for stream");
5716 assert(0);
5717 return BAD_VALUE;
5718 }
5719
5720 InternalRequest requestedStream;
5721 requestedStream = (*itr);
5722
5723
5724 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5725 LOGD("snapshot request internally input buffer %p, frame_number %d",
5726 request->input_buffer, frameNumber);
5727 if(request->input_buffer != NULL){
5728 rc = channel->request(NULL, frameNumber,
5729 pInputBuffer, &mReprocMeta, indexUsed, true,
5730 requestedStream.meteringOnly);
5731 if (rc < 0) {
5732 LOGE("Fail to request on picture channel");
5733 pthread_mutex_unlock(&mMutex);
5734 return rc;
5735 }
5736 } else {
5737 LOGD("snapshot request with frame_number %d", frameNumber);
5738 if (!request->settings) {
5739 rc = channel->request(NULL, frameNumber,
5740 NULL, mPrevParameters, indexUsed, true,
5741 requestedStream.meteringOnly);
5742 } else {
5743 rc = channel->request(NULL, frameNumber,
5744 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5745 }
5746 if (rc < 0) {
5747 LOGE("Fail to request on picture channel");
5748 pthread_mutex_unlock(&mMutex);
5749 return rc;
5750 }
5751
5752 if ((*itr).meteringOnly != 1) {
5753 requestedStream.need_metadata = 1;
5754 streams_need_metadata++;
5755 }
5756 }
5757
5758 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5759 uint32_t j = 0;
5760 for (j = 0; j < streamsArray.num_streams; j++) {
5761 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5763 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5764 else
5765 streamsArray.stream_request[j].buf_index = indexUsed;
5766 break;
5767 }
5768 }
5769 if (j == streamsArray.num_streams) {
5770 LOGE("Did not find matching stream to update index");
5771 assert(0);
5772 }
5773
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005774 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005775 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005776 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005777 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005780 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005781
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 //If 2 streams have need_metadata set to true, fail the request, unless
5783 //we copy/reference count the metadata buffer
5784 if (streams_need_metadata > 1) {
5785 LOGE("not supporting a request in which two streams require"
5786 " 2 HAL metadata buffers for reprocessing");
5787 pthread_mutex_unlock(&mMutex);
5788 return -EINVAL;
5789 }
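 // Example of a rejected combination: a single request asking for both a JPEG
 // (BLOB) capture and a YUV buffer whose channel reports needMetadata == true
 // would require two independent HAL metadata buffers for reprocessing; since
 // the metadata buffer is neither copied nor reference counted, the request
 // fails above with -EINVAL.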
Thierry Strudel3d639192016-09-09 11:52:26 -07005790
Emilian Peev7650c122017-01-19 08:24:33 -08005791 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5793 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5794 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5795 pthread_mutex_unlock(&mMutex);
5796 return BAD_VALUE;
5797 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005798 if (request->input_buffer == NULL) {
5799 /* Set the parameters to backend:
5800 * - For every request in NORMAL MODE
5801 * - For every request in HFR mode during preview only case
5802 * - Once every batch in HFR mode during video recording
5803 */
5804 if (!mBatchSize ||
5805 (mBatchSize && !isVidBufRequested) ||
5806 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5807 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5808 mBatchSize, isVidBufRequested,
5809 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005810
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005811 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5812 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5813 uint32_t m = 0;
5814 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5815 if (streamsArray.stream_request[k].streamID ==
5816 mBatchedStreamsArray.stream_request[m].streamID)
5817 break;
5818 }
5819 if (m == mBatchedStreamsArray.num_streams) {
5820 mBatchedStreamsArray.stream_request\
5821 [mBatchedStreamsArray.num_streams].streamID =
5822 streamsArray.stream_request[k].streamID;
5823 mBatchedStreamsArray.stream_request\
5824 [mBatchedStreamsArray.num_streams].buf_index =
5825 streamsArray.stream_request[k].buf_index;
5826 mBatchedStreamsArray.num_streams =
5827 mBatchedStreamsArray.num_streams + 1;
5828 }
5829 }
5830 streamsArray = mBatchedStreamsArray;
5831 }
5832 /* Update stream id of all the requested buffers */
5833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5834 streamsArray)) {
5835 LOGE("Failed to set the stream id list in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005836 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 return BAD_VALUE;
5838 }
5839
5840 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5841 mParameters);
5842 if (rc < 0) {
5843 LOGE("set_parms failed");
5844 }
5845 /* reset to zero because the batch has been queued */
5846 mToBeQueuedVidBufs = 0;
5847 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5848 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5849 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005850 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5851 uint32_t m = 0;
5852 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5853 if (streamsArray.stream_request[k].streamID ==
5854 mBatchedStreamsArray.stream_request[m].streamID)
5855 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005856 }
5857 if (m == mBatchedStreamsArray.num_streams) {
5858 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5859 streamID = streamsArray.stream_request[k].streamID;
5860 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5861 buf_index = streamsArray.stream_request[k].buf_index;
5862 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5863 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005864 }
5865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005866 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005867
5868 // Start all streams after the first setting is sent, so that the
5869 // setting can be applied sooner: (0 + apply_delay)th frame.
5870 if (mState == CONFIGURED && mChannelHandle) {
5871 //Then start them.
5872 LOGH("Start META Channel");
5873 rc = mMetadataChannel->start();
5874 if (rc < 0) {
5875 LOGE("META channel start failed");
5876 pthread_mutex_unlock(&mMutex);
5877 return rc;
5878 }
5879
5880 if (mAnalysisChannel) {
5881 rc = mAnalysisChannel->start();
5882 if (rc < 0) {
5883 LOGE("Analysis channel start failed");
5884 mMetadataChannel->stop();
5885 pthread_mutex_unlock(&mMutex);
5886 return rc;
5887 }
5888 }
5889
5890 if (mSupportChannel) {
5891 rc = mSupportChannel->start();
5892 if (rc < 0) {
5893 LOGE("Support channel start failed");
5894 mMetadataChannel->stop();
5895 /* Although support and analysis are mutually exclusive today,
5896 adding it in any case for future-proofing */
5897 if (mAnalysisChannel) {
5898 mAnalysisChannel->stop();
5899 }
5900 pthread_mutex_unlock(&mMutex);
5901 return rc;
5902 }
5903 }
5904 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5905 it != mStreamInfo.end(); it++) {
5906 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5907 LOGH("Start Processing Channel mask=%d",
5908 channel->getStreamTypeMask());
5909 rc = channel->start();
5910 if (rc < 0) {
5911 LOGE("channel start failed");
5912 pthread_mutex_unlock(&mMutex);
5913 return rc;
5914 }
5915 }
5916
5917 if (mRawDumpChannel) {
5918 LOGD("Starting raw dump stream");
5919 rc = mRawDumpChannel->start();
5920 if (rc != NO_ERROR) {
5921 LOGE("Error Starting Raw Dump Channel");
5922 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5923 it != mStreamInfo.end(); it++) {
5924 QCamera3Channel *channel =
5925 (QCamera3Channel *)(*it)->stream->priv;
5926 LOGH("Stopping Processing Channel mask=%d",
5927 channel->getStreamTypeMask());
5928 channel->stop();
5929 }
5930 if (mSupportChannel)
5931 mSupportChannel->stop();
5932 if (mAnalysisChannel) {
5933 mAnalysisChannel->stop();
5934 }
5935 mMetadataChannel->stop();
5936 pthread_mutex_unlock(&mMutex);
5937 return rc;
5938 }
5939 }
5940
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005941 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005942 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005943 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005944 if (rc != NO_ERROR) {
5945 LOGE("start_channel failed %d", rc);
5946 pthread_mutex_unlock(&mMutex);
5947 return rc;
5948 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005949
5950 {
5951 // Configure Easel for stream on.
5952 Mutex::Autolock l(gHdrPlusClientLock);
5953 if (EaselManagerClientOpened) {
5954 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5955 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5956 if (rc != OK) {
5957 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5958 mCameraId, mSensorModeInfo.op_pixel_clk);
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005962 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005963 }
5964 }
5965
5966 // Start sensor streaming.
5967 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5968 mChannelHandle);
5969 if (rc != NO_ERROR) {
5970 LOGE("start_sensor_streaming failed %d", rc);
5971 pthread_mutex_unlock(&mMutex);
5972 return rc;
5973 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005974 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005976 }
5977
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005978 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5979 {
5980 Mutex::Autolock l(gHdrPlusClientLock);
5981 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5982 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5983 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5984 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5985 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5986 rc = enableHdrPlusModeLocked();
5987 if (rc != OK) {
5988 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5989 pthread_mutex_unlock(&mMutex);
5990 return rc;
5991 }
5992
5993 mFirstPreviewIntentSeen = true;
5994 }
5995 }
5996
Thierry Strudel3d639192016-09-09 11:52:26 -07005997 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5998
5999 mState = STARTED;
6000 // Added a timed condition wait
6001 struct timespec ts;
6002 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006003 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006004 if (rc < 0) {
6005 isValidTimeout = 0;
6006 LOGE("Error reading the monotonic clock!!");
6007 }
6008 else {
6009 // Default to a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006010 int64_t timeout = 5;
6011 {
6012 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6013 // If there is a pending HDR+ request, the following requests may be blocked until the
6014 // HDR+ request is done. So allow a longer timeout.
6015 if (mHdrPlusPendingRequests.size() > 0) {
6016 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6017 }
6018 }
6019 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006020 }
6021 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006022 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006023 (mState != ERROR) && (mState != DEINIT)) {
6024 if (!isValidTimeout) {
6025 LOGD("Blocking on conditional wait");
6026 pthread_cond_wait(&mRequestCond, &mMutex);
6027 }
6028 else {
6029 LOGD("Blocking on timed conditional wait");
6030 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6031 if (rc == ETIMEDOUT) {
6032 rc = -ENODEV;
6033 LOGE("Unblocked on timeout!!!!");
6034 break;
6035 }
6036 }
6037 LOGD("Unblocked");
6038 if (mWokenUpByDaemon) {
6039 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006040 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006041 break;
6042 }
6043 }
6044 pthread_mutex_unlock(&mMutex);
6045
6046 return rc;
6047}
6048
6049/*===========================================================================
6050 * FUNCTION : dump
6051 *
6052 * DESCRIPTION: Dump the HAL's pending request, pending buffer, and pending
6053 * frame-drop state to the given file descriptor
6054 * PARAMETERS :
6055 * @fd : file descriptor (e.g. from dumpsys) to write the dump to
6056 *
6057 * RETURN : None
6058 *==========================================================================*/
6059void QCamera3HardwareInterface::dump(int fd)
6060{
6061 pthread_mutex_lock(&mMutex);
6062 dprintf(fd, "\n Camera HAL3 information Begin \n");
6063
6064 dprintf(fd, "\nNumber of pending requests: %zu \n",
6065 mPendingRequestsList.size());
6066 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6067 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6068 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6069 for(pendingRequestIterator i = mPendingRequestsList.begin();
6070 i != mPendingRequestsList.end(); i++) {
6071 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6072 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6073 i->input_buffer);
6074 }
6075 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6076 mPendingBuffersMap.get_num_overall_buffers());
6077 dprintf(fd, "-------+------------------\n");
6078 dprintf(fd, " Frame | Stream type mask \n");
6079 dprintf(fd, "-------+------------------\n");
6080 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6081 for(auto &j : req.mPendingBufferList) {
6082 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6083 dprintf(fd, " %5d | %11d \n",
6084 req.frame_number, channel->getStreamTypeMask());
6085 }
6086 }
6087 dprintf(fd, "-------+------------------\n");
6088
6089 dprintf(fd, "\nPending frame drop list: %zu\n",
6090 mPendingFrameDropList.size());
6091 dprintf(fd, "-------+-----------\n");
6092 dprintf(fd, " Frame | Stream ID \n");
6093 dprintf(fd, "-------+-----------\n");
6094 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6095 i != mPendingFrameDropList.end(); i++) {
6096 dprintf(fd, " %5d | %9d \n",
6097 i->frame_number, i->stream_ID);
6098 }
6099 dprintf(fd, "-------+-----------\n");
6100
6101 dprintf(fd, "\n Camera HAL3 information End \n");
6102
6103 /* use dumpsys media.camera as trigger to send update debug level event */
6104 mUpdateDebugLevel = true;
6105 pthread_mutex_unlock(&mMutex);
6106 return;
6107}
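/* How this is reached (sketch under assumptions; the concrete wrapper lives
 * elsewhere in this file and may differ in detail): `adb shell dumpsys
 * media.camera` makes the camera service invoke camera3_device_ops_t::dump,
 * whose static wrapper forwards the fd to the member function above, roughly:
 *
 *   void QCamera3HardwareInterface::dump(const struct camera3_device *device, int fd)
 *   {
 *       QCamera3HardwareInterface *hw =
 *               reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
 *       hw->dump(fd);   // lands in the member function above
 *   }
 */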
6108
6109/*===========================================================================
6110 * FUNCTION : flush
6111 *
6112 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6113 * conditionally restarts channels
6114 *
6115 * PARAMETERS :
6116 * @ restartChannels: re-start all channels
6117 *
6118 *
6119 * RETURN :
6120 * 0 on success
6121 * Error code on failure
6122 *==========================================================================*/
6123int QCamera3HardwareInterface::flush(bool restartChannels)
6124{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006125 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006126 int32_t rc = NO_ERROR;
6127
6128 LOGD("Unblocking Process Capture Request");
6129 pthread_mutex_lock(&mMutex);
6130 mFlush = true;
6131 pthread_mutex_unlock(&mMutex);
6132
6133 rc = stopAllChannels();
6134 // unlink of dualcam
6135 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006136 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6137 &m_pDualCamCmdPtr->bundle_info;
6138 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006139 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6140 pthread_mutex_lock(&gCamLock);
6141
6142 if (mIsMainCamera == 1) {
6143 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6144 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006145 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006146 // related session id should be session id of linked session
6147 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6148 } else {
6149 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6150 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006151 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6153 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006154 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 pthread_mutex_unlock(&gCamLock);
6156
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006157 rc = mCameraHandle->ops->set_dual_cam_cmd(
6158 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006159 if (rc < 0) {
6160 LOGE("Dualcam: Unlink failed, but still proceed to close");
6161 }
6162 }
6163
6164 if (rc < 0) {
6165 LOGE("stopAllChannels failed");
6166 return rc;
6167 }
6168 if (mChannelHandle) {
6169 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6170 mChannelHandle);
6171 }
6172
6173 // Reset bundle info
6174 rc = setBundleInfo();
6175 if (rc < 0) {
6176 LOGE("setBundleInfo failed %d", rc);
6177 return rc;
6178 }
6179
6180 // Mutex Lock
6181 pthread_mutex_lock(&mMutex);
6182
6183 // Unblock process_capture_request
6184 mPendingLiveRequest = 0;
6185 pthread_cond_signal(&mRequestCond);
6186
6187 rc = notifyErrorForPendingRequests();
6188 if (rc < 0) {
6189 LOGE("notifyErrorForPendingRequests failed");
6190 pthread_mutex_unlock(&mMutex);
6191 return rc;
6192 }
6193
6194 mFlush = false;
6195
6196 // Start the Streams/Channels
6197 if (restartChannels) {
6198 rc = startAllChannels();
6199 if (rc < 0) {
6200 LOGE("startAllChannels failed");
6201 pthread_mutex_unlock(&mMutex);
6202 return rc;
6203 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006204 if (mChannelHandle) {
6205 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006206 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006207 if (rc < 0) {
6208 LOGE("start_channel failed");
6209 pthread_mutex_unlock(&mMutex);
6210 return rc;
6211 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006212 }
6213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 pthread_mutex_unlock(&mMutex);
6215
6216 return 0;
6217}
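 // Usage note: handleCameraDeviceError() below calls flush(false) to drain the
 // pipeline without restarting channels before marking the device DEINIT,
 // while a framework-initiated flush would typically pass restartChannels =
 // true so that streaming can resume afterwards (the framework entry point is
 // defined elsewhere in this file).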
6218
6219/*===========================================================================
6220 * FUNCTION : flushPerf
6221 *
6222 * DESCRIPTION: This is the performance optimization version of flush that does
6223 * not use stream off, rather flushes the system
6224 *
6225 * PARAMETERS :
6226 *
6227 *
6228 * RETURN : 0 : success
6229 * -EINVAL: input is malformed (device is not valid)
6230 * -ENODEV: if the device has encountered a serious error
6231 *==========================================================================*/
6232int QCamera3HardwareInterface::flushPerf()
6233{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006234 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006235 int32_t rc = 0;
6236 struct timespec timeout;
6237 bool timed_wait = false;
6238
6239 pthread_mutex_lock(&mMutex);
6240 mFlushPerf = true;
6241 mPendingBuffersMap.numPendingBufsAtFlush =
6242 mPendingBuffersMap.get_num_overall_buffers();
6243 LOGD("Calling flush. Wait for %d buffers to return",
6244 mPendingBuffersMap.numPendingBufsAtFlush);
6245
6246 /* send the flush event to the backend */
6247 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6248 if (rc < 0) {
6249 LOGE("Error in flush: IOCTL failure");
6250 mFlushPerf = false;
6251 pthread_mutex_unlock(&mMutex);
6252 return -ENODEV;
6253 }
6254
6255 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6256 LOGD("No pending buffers in HAL, return flush");
6257 mFlushPerf = false;
6258 pthread_mutex_unlock(&mMutex);
6259 return rc;
6260 }
6261
6262 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006263 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006264 if (rc < 0) {
6265 LOGE("Error reading the monotonic clock, cannot use timed wait");
6266 } else {
6267 timeout.tv_sec += FLUSH_TIMEOUT;
6268 timed_wait = true;
6269 }
6270
6271 //Block on conditional variable
6272 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6273 LOGD("Waiting on mBuffersCond");
6274 if (!timed_wait) {
6275 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6276 if (rc != 0) {
6277 LOGE("pthread_cond_wait failed due to rc = %s",
6278 strerror(rc));
6279 break;
6280 }
6281 } else {
6282 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6283 if (rc != 0) {
6284 LOGE("pthread_cond_timedwait failed due to rc = %s",
6285 strerror(rc));
6286 break;
6287 }
6288 }
6289 }
6290 if (rc != 0) {
6291 mFlushPerf = false;
6292 pthread_mutex_unlock(&mMutex);
6293 return -ENODEV;
6294 }
6295
6296 LOGD("Received buffers, now safe to return them");
6297
6298 //make sure the channels handle flush
6299 //currently only required for the picture channel to release snapshot resources
6300 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6301 it != mStreamInfo.end(); it++) {
6302 QCamera3Channel *channel = (*it)->channel;
6303 if (channel) {
6304 rc = channel->flush();
6305 if (rc) {
6306 LOGE("Flushing the channels failed with error %d", rc);
6307 // Even though the channel flush failed, we need to continue and
6308 // return the buffers we have to the framework; however, the return
6309 // value will be an error
6310 rc = -ENODEV;
6311 }
6312 }
6313 }
6314
6315 /* notify the frameworks and send errored results */
6316 rc = notifyErrorForPendingRequests();
6317 if (rc < 0) {
6318 LOGE("notifyErrorForPendingRequests failed");
6319 pthread_mutex_unlock(&mMutex);
6320 return rc;
6321 }
6322
6323 //unblock process_capture_request
6324 mPendingLiveRequest = 0;
6325 unblockRequestIfNecessary();
6326
6327 mFlushPerf = false;
6328 pthread_mutex_unlock(&mMutex);
6329 LOGD ("Flush Operation complete. rc = %d", rc);
6330 return rc;
6331}
6332
6333/*===========================================================================
6334 * FUNCTION : handleCameraDeviceError
6335 *
6336 * DESCRIPTION: This function calls internal flush and notifies the error to
6337 * framework and updates the state variable.
6338 *
6339 * PARAMETERS : None
6340 *
6341 * RETURN : NO_ERROR on Success
6342 * Error code on failure
6343 *==========================================================================*/
6344int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6345{
6346 int32_t rc = NO_ERROR;
6347
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006348 {
6349 Mutex::Autolock lock(mFlushLock);
6350 pthread_mutex_lock(&mMutex);
6351 if (mState != ERROR) {
6352 //if mState != ERROR, nothing to be done
6353 pthread_mutex_unlock(&mMutex);
6354 return NO_ERROR;
6355 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006356 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006357
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006358 rc = flush(false /* restart channels */);
6359 if (NO_ERROR != rc) {
6360 LOGE("internal flush to handle mState = ERROR failed");
6361 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006362
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006363 pthread_mutex_lock(&mMutex);
6364 mState = DEINIT;
6365 pthread_mutex_unlock(&mMutex);
6366 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006367
6368 camera3_notify_msg_t notify_msg;
6369 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6370 notify_msg.type = CAMERA3_MSG_ERROR;
6371 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6372 notify_msg.message.error.error_stream = NULL;
6373 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006374 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006375
6376 return rc;
6377}
6378
6379/*===========================================================================
6380 * FUNCTION : captureResultCb
6381 *
6382 * DESCRIPTION: Callback handler for all capture result
6383 * (streams, as well as metadata)
6384 *
6385 * PARAMETERS :
6386 * @metadata : metadata information
6387 * @buffer : actual gralloc buffer to be returned to frameworks.
6388 * NULL if metadata.
6389 *
6390 * RETURN : NONE
6391 *==========================================================================*/
6392void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6393 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6394{
6395 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006396 pthread_mutex_lock(&mMutex);
6397 uint8_t batchSize = mBatchSize;
6398 pthread_mutex_unlock(&mMutex);
6399 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006400 handleBatchMetadata(metadata_buf,
6401 true /* free_and_bufdone_meta_buf */);
6402 } else { /* mBatchSize = 0 */
6403 hdrPlusPerfLock(metadata_buf);
6404 pthread_mutex_lock(&mMutex);
6405 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006406 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006407 true /* last urgent frame of batch metadata */,
6408 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006409 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006410 pthread_mutex_unlock(&mMutex);
6411 }
6412 } else if (isInputBuffer) {
6413 pthread_mutex_lock(&mMutex);
6414 handleInputBufferWithLock(frame_number);
6415 pthread_mutex_unlock(&mMutex);
6416 } else {
6417 pthread_mutex_lock(&mMutex);
6418 handleBufferWithLock(buffer, frame_number);
6419 pthread_mutex_unlock(&mMutex);
6420 }
6421 return;
6422}
6423
6424/*===========================================================================
6425 * FUNCTION : getReprocessibleOutputStreamId
6426 *
6427 * DESCRIPTION: Get source output stream id for the input reprocess stream
6428 * based on size and format, which would be the largest
6429 * output stream if an input stream exists.
6430 *
6431 * PARAMETERS :
6432 * @id : return the stream id if found
6433 *
6434 * RETURN : int32_t type of status
6435 * NO_ERROR -- success
6436 * non-zero failure code
6437 *==========================================================================*/
6438int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6439{
6440 /* check if any output or bidirectional stream with the same size and format
6441 and return that stream */
6442 if ((mInputStreamInfo.dim.width > 0) &&
6443 (mInputStreamInfo.dim.height > 0)) {
6444 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6445 it != mStreamInfo.end(); it++) {
6446
6447 camera3_stream_t *stream = (*it)->stream;
6448 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6449 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6450 (stream->format == mInputStreamInfo.format)) {
6451 // Usage flag for an input stream and the source output stream
6452 // may be different.
6453 LOGD("Found reprocessible output stream! %p", *it);
6454 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6455 stream->usage, mInputStreamInfo.usage);
6456
6457 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6458 if (channel != NULL && channel->mStreams[0]) {
6459 id = channel->mStreams[0]->getMyServerID();
6460 return NO_ERROR;
6461 }
6462 }
6463 }
6464 } else {
6465 LOGD("No input stream, so no reprocessible output stream");
6466 }
6467 return NAME_NOT_FOUND;
6468}
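 // Worked example (the 4032x3024 size is illustrative only): if a 4032x3024
 // YCbCr_420_888 input stream was configured, the first output or
 // bidirectional stream with the same size and format is picked and the server
 // ID of its first stream is returned; differing gralloc usage flags alone do
 // not disqualify a match.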
6469
6470/*===========================================================================
6471 * FUNCTION : lookupFwkName
6472 *
6473 * DESCRIPTION: In case the enum is not same in fwk and backend
6474 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6475 * make sure the parameter is correctly propagated
6476 * PARAMETERS :
6477 * @arr : map between the two enums
6478 * @len : len of the map
6479 * @hal_name : name of the hal_parm to map
6480 *
6481 * RETURN : int type of status
6482 * fwk_name -- success
6483 * non-zero failure code
6484 *==========================================================================*/
6485template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6486 size_t len, halType hal_name)
6487{
6488
6489 for (size_t i = 0; i < len; i++) {
6490 if (arr[i].hal_name == hal_name) {
6491 return arr[i].fwk_name;
6492 }
6493 }
6494
6495 /* Not finding a matching framework type is not necessarily an error.
6496 * This happens when mm-camera supports more attributes
6497 * than the framework does */
6498 LOGH("Cannot find matching framework type");
6499 return NAME_NOT_FOUND;
6500}
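/* Usage sketch (illustrative only; the real map tables, e.g. the effect-mode
 * map, are defined earlier in this file). Any map entry type exposing
 * .fwk_name and .hal_name members works as mapType:
 *
 *   struct ExampleMap { uint8_t fwk_name; cam_effect_mode_type hal_name; };
 *   static const ExampleMap kMap[] = {
 *       { ANDROID_CONTROL_EFFECT_MODE_OFF,  CAM_EFFECT_MODE_OFF  },
 *       { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
 *   };
 *   int fwk = lookupFwkName(kMap, sizeof(kMap) / sizeof(kMap[0]), halMode);
 *   if (fwk == NAME_NOT_FOUND) {
 *       // backend value has no framework equivalent; not necessarily an error
 *   }
 */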
6501
6502/*===========================================================================
6503 * FUNCTION : lookupHalName
6504 *
6505 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6506 * make sure the parameter is correctly propagated
6507 *
6508 * PARAMETERS :
6509 * @arr : map between the two enums
6510 * @len : len of the map
6511 * @fwk_name : name of the framework parameter to map
6512 *
6513 * RETURN : int32_t type of status
6514 * hal_name -- success
6515 * non-zero failure code
6516 *==========================================================================*/
6517template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6518 size_t len, fwkType fwk_name)
6519{
6520 for (size_t i = 0; i < len; i++) {
6521 if (arr[i].fwk_name == fwk_name) {
6522 return arr[i].hal_name;
6523 }
6524 }
6525
6526 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6527 return NAME_NOT_FOUND;
6528}
6529
6530/*===========================================================================
6531 * FUNCTION : lookupProp
6532 *
6533 * DESCRIPTION: lookup a value by its name
6534 *
6535 * PARAMETERS :
6536 * @arr : map between the two enums
6537 * @len : size of the map
6538 * @name : name to be looked up
6539 *
6540 * RETURN : Value if found
6541 * CAM_CDS_MODE_MAX if not found
6542 *==========================================================================*/
6543template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6544 size_t len, const char *name)
6545{
6546 if (name) {
6547 for (size_t i = 0; i < len; i++) {
6548 if (!strcmp(arr[i].desc, name)) {
6549 return arr[i].val;
6550 }
6551 }
6552 }
6553 return CAM_CDS_MODE_MAX;
6554}
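/* Usage sketch (illustrative; the property name and map table below are
 * assumptions, not taken from this section):
 *
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds =
 *           lookupProp(CDS_MAP, sizeof(CDS_MAP) / sizeof(CDS_MAP[0]), prop);
 *   // CAM_CDS_MODE_MAX means the string matched no entry in the map.
 */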
6555
6556/*===========================================================================
6557 * FUNCTION   : translateFromHalMetadata
6558 * DESCRIPTION: Translate backend HAL metadata into the framework CameraMetadata result
6559 *
6560 * PARAMETERS :
6561 * @metadata : metadata information from callback
6562 * @timestamp: metadata buffer timestamp
6563 * @request_id: request id
6564 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006565 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006566 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6567 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006568 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006569 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6570 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006571 *
6572 * RETURN : camera_metadata_t*
6573 * metadata in a format specified by fwk
6574 *==========================================================================*/
6575camera_metadata_t*
6576QCamera3HardwareInterface::translateFromHalMetadata(
6577 metadata_buffer_t *metadata,
6578 nsecs_t timestamp,
6579 int32_t request_id,
6580 const CameraMetadata& jpegMetadata,
6581 uint8_t pipeline_depth,
6582 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006583 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006584 /* DevCamDebug metadata translateFromHalMetadata argument */
6585 uint8_t DevCamDebug_meta_enable,
6586 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006587 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006588 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006589 bool lastMetadataInBatch,
6590 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006591{
6592 CameraMetadata camMetadata;
6593 camera_metadata_t *resultMetadata;
6594
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006595 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006596 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6597 * Timestamp is needed because it's used for shutter notify calculation.
6598 */
6599 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6600 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006601 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006602 }
6603
Thierry Strudel3d639192016-09-09 11:52:26 -07006604 if (jpegMetadata.entryCount())
6605 camMetadata.append(jpegMetadata);
6606
6607 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6608 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6609 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6610 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006611 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006612 if (mBatchSize == 0) {
6613 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6614 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6615 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006616
Samuel Ha68ba5172016-12-15 18:41:12 -08006617 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6618 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6619 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6620 // DevCamDebug metadata translateFromHalMetadata AF
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6622 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6623 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6624 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6627 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6628 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6629 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6632 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6633 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6634 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6635 }
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6637 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6638 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6639 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6642 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6643 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6644 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6647 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6648 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6649 *DevCamDebug_af_monitor_pdaf_target_pos;
6650 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6651 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6652 }
6653 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6654 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6655 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6656 *DevCamDebug_af_monitor_pdaf_confidence;
6657 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6658 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6659 }
6660 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6661 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6662 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6663 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6664 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6667 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6668 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6669 *DevCamDebug_af_monitor_tof_target_pos;
6670 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6671 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6672 }
6673 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6674 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6675 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6676 *DevCamDebug_af_monitor_tof_confidence;
6677 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6678 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6679 }
6680 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6681 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6682 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6683 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6684 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6687 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6688 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6689 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6690 &fwk_DevCamDebug_af_monitor_type_select, 1);
6691 }
6692 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6693 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6694 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6695 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6696 &fwk_DevCamDebug_af_monitor_refocus, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6699 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6700 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6701 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6702 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6703 }
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6705 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6706 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6707 *DevCamDebug_af_search_pdaf_target_pos;
6708 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6709 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6712 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6713 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6714 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6715 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6718 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6719 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6720 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6721 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6724 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6725 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6726 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6727 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6730 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6731 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6732 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6733 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6736 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6737 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6738 *DevCamDebug_af_search_tof_target_pos;
6739 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6740 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6743 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6744 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6745 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6746 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6749 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6750 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6751 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6752 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6753 }
6754 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6755 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6756 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6757 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6758 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6761 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6762 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6763 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6764 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6767 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6768 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6769 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6770 &fwk_DevCamDebug_af_search_type_select, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6773 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6774 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6775 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6776 &fwk_DevCamDebug_af_search_next_pos, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6779 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6780 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6781 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6782 &fwk_DevCamDebug_af_search_target_pos, 1);
6783 }
6784 // DevCamDebug metadata translateFromHalMetadata AEC
6785 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6786 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6787 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6788 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6789 }
6790 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6791 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6792 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6793 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6796 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6797 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6798 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6801 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6802 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6803 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6806 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6807 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6808 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6809 }
6810 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6811 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6812 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6813 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6816 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6817 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6818 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6819 }
6820 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6821 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6822 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6823 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6824 }
Samuel Ha34229982017-02-17 13:51:11 -08006825 // DevCamDebug metadata translateFromHalMetadata zzHDR
6826 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6827 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6828 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6829 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6832 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006833 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006834 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6835 }
6836 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6837 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6838 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6839 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6842 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006843 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006844 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6845 }
6846 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6847 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6848 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6849 *DevCamDebug_aec_hdr_sensitivity_ratio;
6850 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6851 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6852 }
6853 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6854 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6855 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6856 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6857 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6858 }
6859 // DevCamDebug metadata translateFromHalMetadata ADRC
6860 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6861 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6862 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6863 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6864 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6865 }
6866 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6867 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6868 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6869 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6870 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6871 }
6872 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6873 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6874 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6875 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6876 }
6877 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6878 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6879 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6880 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6881 }
6882 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6883 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6884 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6885 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6886 }
6887 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6888 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6889 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6890 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6891 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006892 // DevCamDebug metadata translateFromHalMetadata AWB
6893 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6894 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6895 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6896 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6897 }
6898 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6899 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6900 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6901 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6904 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6905 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6906 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6907 }
6908 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6909 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6910 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6911 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6914 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6915 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6916 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6917 }
6918 }
6919 // atrace_end(ATRACE_TAG_ALWAYS);
6920
Thierry Strudel3d639192016-09-09 11:52:26 -07006921 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6922 int64_t fwk_frame_number = *frame_number;
6923 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6924 }
6925
6926 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6927 int32_t fps_range[2];
6928 fps_range[0] = (int32_t)float_range->min_fps;
6929 fps_range[1] = (int32_t)float_range->max_fps;
6930 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6931 fps_range, 2);
6932 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6933 fps_range[0], fps_range[1]);
6934 }
6935
6936 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6937 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6938 }
6939
6940 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6941 int val = lookupFwkName(SCENE_MODES_MAP,
6942 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6943 *sceneMode);
6944 if (NAME_NOT_FOUND != val) {
6945 uint8_t fwkSceneMode = (uint8_t)val;
6946 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6947 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6948 fwkSceneMode);
6949 }
6950 }
6951
6952 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6953 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6954 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6955 }
6956
6957 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6958 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6959 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6960 }
6961
6962 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6963 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6964 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6965 }
6966
6967 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6968 CAM_INTF_META_EDGE_MODE, metadata) {
6969 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6970 }
6971
6972 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6973 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6974 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6975 }
6976
6977 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6978 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6979 }
6980
6981 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6982 if (0 <= *flashState) {
6983 uint8_t fwk_flashState = (uint8_t) *flashState;
6984 if (!gCamCapability[mCameraId]->flash_available) {
6985 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6986 }
6987 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6988 }
6989 }
6990
6991 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6992 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6993 if (NAME_NOT_FOUND != val) {
6994 uint8_t fwk_flashMode = (uint8_t)val;
6995 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6996 }
6997 }
6998
6999 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7000 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7001 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7002 }
7003
7004 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7005 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7006 }
7007
7008 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7009 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7010 }
7011
7012 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7013 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7014 }
7015
7016 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7017 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7018 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7019 }
7020
7021 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7022 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7023 LOGD("fwk_videoStab = %d", fwk_videoStab);
7024 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7025 } else {
7026 // Regardless of whether video stabilization is supported, CTS expects the EIS
7027 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7028 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7029 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007030 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007031 }
7032
7033 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7034 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7035 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7036 }
7037
7038 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7039 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7040 }
7041
Thierry Strudel3d639192016-09-09 11:52:26 -07007042 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7043 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007044 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007045
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007046 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7047 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007048
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007049 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007050 blackLevelAppliedPattern->cam_black_level[0],
7051 blackLevelAppliedPattern->cam_black_level[1],
7052 blackLevelAppliedPattern->cam_black_level[2],
7053 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007054 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7055 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007056
7057#ifndef USE_HAL_3_3
7058 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307059 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007060 // depth space.
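    // Dividing by 16 (a 4-bit right shift) performs the 14-bit to 10-bit conversion;
    // for example, an applied black level of 1024 in the 14-bit pipeline becomes
    // 1024 / 16 = 64 in 10-bit sensor units.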
Jason Lee4f3d96e2017-02-28 19:24:14 +05307061 fwk_blackLevelInd[0] /= 16.0;
7062 fwk_blackLevelInd[1] /= 16.0;
7063 fwk_blackLevelInd[2] /= 16.0;
7064 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007065 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7066 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007067#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007068 }
7069
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007070#ifndef USE_HAL_3_3
7071 // Fixed whitelevel is used by ISP/Sensor
7072 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7073 &gCamCapability[mCameraId]->white_level, 1);
7074#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007075
7076 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7077 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7078 int32_t scalerCropRegion[4];
7079 scalerCropRegion[0] = hScalerCropRegion->left;
7080 scalerCropRegion[1] = hScalerCropRegion->top;
7081 scalerCropRegion[2] = hScalerCropRegion->width;
7082 scalerCropRegion[3] = hScalerCropRegion->height;
7083
7084 // Adjust crop region from sensor output coordinate system to active
7085 // array coordinate system.
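    // (toActiveArray() is assumed to scale and offset the rectangle so that (0,0) refers to
    // the top-left of the full active pixel array rather than the possibly cropped or binned
    // sensor output.)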
7086 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7087 scalerCropRegion[2], scalerCropRegion[3]);
7088
7089 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7090 }
7091
7092 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7093 LOGD("sensorExpTime = %lld", *sensorExpTime);
7094 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7095 }
7096
7097 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7098 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7099 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7100 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7101 }
7102
7103 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7104 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7105 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7106 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7107 sensorRollingShutterSkew, 1);
7108 }
7109
7110 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7111 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7112 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7113
7114 //calculate the noise profile based on sensitivity
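    // The (S, O) pair follows the Camera2 noise model convention, where the variance of a
    // normalized pixel value x is approximately S * x + O; one pair is reported per color
    // channel, which is why the array below holds 2 * num_color_channels entries.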
7115 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7116 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7117 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7118 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7119 noise_profile[i] = noise_profile_S;
7120 noise_profile[i+1] = noise_profile_O;
7121 }
7122 LOGD("noise model entry (S, O) is (%f, %f)",
7123 noise_profile_S, noise_profile_O);
7124 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7125 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7126 }
7127
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007128#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007129 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007130 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007131 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007133 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7134 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7135 }
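    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in ISO-like units where 100
    // means no extra digital gain; combining the ISP sensitivity with the post-stats factor
    // above is assumed to yield the total boost applied after the RAW stage.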
7136 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007137#endif
7138
Thierry Strudel3d639192016-09-09 11:52:26 -07007139 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7140 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7141 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7142 }
7143
7144 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7145 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7146 *faceDetectMode);
7147 if (NAME_NOT_FOUND != val) {
7148 uint8_t fwk_faceDetectMode = (uint8_t)val;
7149 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7150
7151 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7152 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7153 CAM_INTF_META_FACE_DETECTION, metadata) {
7154 uint8_t numFaces = MIN(
7155 faceDetectionInfo->num_faces_detected, MAX_ROI);
7156 int32_t faceIds[MAX_ROI];
7157 uint8_t faceScores[MAX_ROI];
7158 int32_t faceRectangles[MAX_ROI * 4];
7159 int32_t faceLandmarks[MAX_ROI * 6];
7160 size_t j = 0, k = 0;
7161
7162 for (size_t i = 0; i < numFaces; i++) {
7163 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7164 // Adjust crop region from sensor output coordinate system to active
7165 // array coordinate system.
7166 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7167 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7168 rect.width, rect.height);
7169
7170 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7171 faceRectangles+j, -1);
7172
Jason Lee8ce36fa2017-04-19 19:40:37 -07007173 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7174 "bottom-right (%d, %d)",
7175 faceDetectionInfo->frame_id, i,
7176 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7177 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7178
Thierry Strudel3d639192016-09-09 11:52:26 -07007179 j+= 4;
7180 }
7181 if (numFaces <= 0) {
7182 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7183 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7184 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7185 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7186 }
7187
7188 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7189 numFaces);
7190 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7191 faceRectangles, numFaces * 4U);
7192 if (fwk_faceDetectMode ==
7193 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7194 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7195 CAM_INTF_META_FACE_LANDMARK, metadata) {
7196
7197 for (size_t i = 0; i < numFaces; i++) {
7198 // Map the co-ordinate sensor output coordinate system to active
7199 // array coordinate system.
7200 mCropRegionMapper.toActiveArray(
7201 landmarks->face_landmarks[i].left_eye_center.x,
7202 landmarks->face_landmarks[i].left_eye_center.y);
7203 mCropRegionMapper.toActiveArray(
7204 landmarks->face_landmarks[i].right_eye_center.x,
7205 landmarks->face_landmarks[i].right_eye_center.y);
7206 mCropRegionMapper.toActiveArray(
7207 landmarks->face_landmarks[i].mouth_center.x,
7208 landmarks->face_landmarks[i].mouth_center.y);
7209
7210 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007211
7212 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7213 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7214 faceDetectionInfo->frame_id, i,
7215 faceLandmarks[k + LEFT_EYE_X],
7216 faceLandmarks[k + LEFT_EYE_Y],
7217 faceLandmarks[k + RIGHT_EYE_X],
7218 faceLandmarks[k + RIGHT_EYE_Y],
7219 faceLandmarks[k + MOUTH_X],
7220 faceLandmarks[k + MOUTH_Y]);
7221
Thierry Strudel04e026f2016-10-10 11:27:36 -07007222 k+= TOTAL_LANDMARK_INDICES;
7223 }
7224 } else {
7225 for (size_t i = 0; i < numFaces; i++) {
7226 setInvalidLandmarks(faceLandmarks+k);
7227 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007228 }
7229 }
7230
Jason Lee49619db2017-04-13 12:07:22 -07007231 for (size_t i = 0; i < numFaces; i++) {
7232 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7233
7234 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7235 faceDetectionInfo->frame_id, i, faceIds[i]);
7236 }
7237
Thierry Strudel3d639192016-09-09 11:52:26 -07007238 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7239 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7240 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007241 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007242 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7243 CAM_INTF_META_FACE_BLINK, metadata) {
7244 uint8_t detected[MAX_ROI];
7245 uint8_t degree[MAX_ROI * 2];
7246 for (size_t i = 0; i < numFaces; i++) {
7247 detected[i] = blinks->blink[i].blink_detected;
7248 degree[2 * i] = blinks->blink[i].left_blink;
7249 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007250
Jason Lee49619db2017-04-13 12:07:22 -07007251 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7252 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7253 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7254 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007255 }
7256 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7257 detected, numFaces);
7258 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7259 degree, numFaces * 2);
7260 }
7261 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7262 CAM_INTF_META_FACE_SMILE, metadata) {
7263 uint8_t degree[MAX_ROI];
7264 uint8_t confidence[MAX_ROI];
7265 for (size_t i = 0; i < numFaces; i++) {
7266 degree[i] = smiles->smile[i].smile_degree;
7267 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007268
Jason Lee49619db2017-04-13 12:07:22 -07007269 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7270 "smile_degree=%d, smile_score=%d",
7271 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007272 }
7273 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7274 degree, numFaces);
7275 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7276 confidence, numFaces);
7277 }
7278 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7279 CAM_INTF_META_FACE_GAZE, metadata) {
7280 int8_t angle[MAX_ROI];
7281 int32_t direction[MAX_ROI * 3];
7282 int8_t degree[MAX_ROI * 2];
7283 for (size_t i = 0; i < numFaces; i++) {
7284 angle[i] = gazes->gaze[i].gaze_angle;
7285 direction[3 * i] = gazes->gaze[i].updown_dir;
7286 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7287 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7288 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7289 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007290
7291 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7292 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7293 "left_right_gaze=%d, top_bottom_gaze=%d",
7294 faceDetectionInfo->frame_id, i, angle[i],
7295 direction[3 * i], direction[3 * i + 1],
7296 direction[3 * i + 2],
7297 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007298 }
7299 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7300 (uint8_t *)angle, numFaces);
7301 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7302 direction, numFaces * 3);
7303 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7304 (uint8_t *)degree, numFaces * 2);
7305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007306 }
7307 }
7308 }
7309 }
7310
7311 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7312 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007313 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007314 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007315 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007316
Shuzhen Wang14415f52016-11-16 18:26:18 -08007317 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7318 histogramBins = *histBins;
7319 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7320 }
7321
7322 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7324 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007325 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007326
7327 switch (stats_data->type) {
7328 case CAM_HISTOGRAM_TYPE_BAYER:
7329 switch (stats_data->bayer_stats.data_type) {
7330 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007331 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7332 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007333 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007334 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7335 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007336 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007337 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7338 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007339 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007340 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007341 case CAM_STATS_CHANNEL_R:
7342 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007343 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7344 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007345 }
7346 break;
7347 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007348 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 break;
7350 }
7351
Shuzhen Wang14415f52016-11-16 18:26:18 -08007352 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007353 }
7354 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007355 }
7356
7357 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7358 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7359 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7360 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7361 }
7362
7363 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7364 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7365 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7366 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7367 }
7368
7369 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7370 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7371 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7372 CAM_MAX_SHADING_MAP_HEIGHT);
7373 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7374 CAM_MAX_SHADING_MAP_WIDTH);
7375 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7376 lensShadingMap->lens_shading, 4U * map_width * map_height);
7377 }
7378
7379 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7380 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7381 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7382 }
7383
7384 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7385 //Populate CAM_INTF_META_TONEMAP_CURVES
7386 /* ch0 = G, ch1 = B, ch2 = R */
7387 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7388 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7389 tonemap->tonemap_points_cnt,
7390 CAM_MAX_TONEMAP_CURVE_SIZE);
7391 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7392 }
7393
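    // Each tonemap point is an (input, output) pair, so each channel publishes
    // tonemap_points_cnt * 2 float values.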
7394 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7395 &tonemap->curves[0].tonemap_points[0][0],
7396 tonemap->tonemap_points_cnt * 2);
7397
7398 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7399 &tonemap->curves[1].tonemap_points[0][0],
7400 tonemap->tonemap_points_cnt * 2);
7401
7402 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7403 &tonemap->curves[2].tonemap_points[0][0],
7404 tonemap->tonemap_points_cnt * 2);
7405 }
7406
7407 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7408 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7409 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7410 CC_GAIN_MAX);
7411 }
7412
7413 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7414 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7415 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7416 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7417 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7418 }
7419
7420 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7421 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7422 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7423 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7424 toneCurve->tonemap_points_cnt,
7425 CAM_MAX_TONEMAP_CURVE_SIZE);
7426 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7427 }
7428 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7429 (float*)toneCurve->curve.tonemap_points,
7430 toneCurve->tonemap_points_cnt * 2);
7431 }
7432
7433 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7434 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7435 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7436 predColorCorrectionGains->gains, 4);
7437 }
7438
7439 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7440 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7441 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7442 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7443 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7444 }
7445
7446 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7447 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7448 }
7449
7450 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7451 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7452 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7453 }
7454
7455 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7456 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7457 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7458 }
7459
7460 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7461 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7462 *effectMode);
7463 if (NAME_NOT_FOUND != val) {
7464 uint8_t fwk_effectMode = (uint8_t)val;
7465 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7466 }
7467 }
7468
7469 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7470 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7471 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7472 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7473 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7474 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7475 }
7476 int32_t fwk_testPatternData[4];
7477 fwk_testPatternData[0] = testPatternData->r;
7478 fwk_testPatternData[3] = testPatternData->b;
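    // The framework expects the pattern data in [R, G_even, G_odd, B] order; which of the
    // HAL's Gr/Gb values maps to G_even depends on the CFA phase, handled by the switch below.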
7479 switch (gCamCapability[mCameraId]->color_arrangement) {
7480 case CAM_FILTER_ARRANGEMENT_RGGB:
7481 case CAM_FILTER_ARRANGEMENT_GRBG:
7482 fwk_testPatternData[1] = testPatternData->gr;
7483 fwk_testPatternData[2] = testPatternData->gb;
7484 break;
7485 case CAM_FILTER_ARRANGEMENT_GBRG:
7486 case CAM_FILTER_ARRANGEMENT_BGGR:
7487 fwk_testPatternData[2] = testPatternData->gr;
7488 fwk_testPatternData[1] = testPatternData->gb;
7489 break;
7490 default:
7491 LOGE("color arrangement %d is not supported",
7492 gCamCapability[mCameraId]->color_arrangement);
7493 break;
7494 }
7495 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7496 }
7497
7498 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7499 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7500 }
7501
7502 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7503 String8 str((const char *)gps_methods);
7504 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7505 }
7506
7507 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7508 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7509 }
7510
7511 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7512 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7513 }
7514
7515 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7516 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7517 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7518 }
7519
7520 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7521 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7522 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7523 }
7524
7525 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7526 int32_t fwk_thumb_size[2];
7527 fwk_thumb_size[0] = thumb_size->width;
7528 fwk_thumb_size[1] = thumb_size->height;
7529 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7530 }
7531
7532 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7533 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7534 privateData,
7535 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7536 }
7537
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007538 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007539 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007540 meteringMode, 1);
7541 }
7542
Thierry Strudel54dc9782017-02-15 12:12:10 -08007543 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7544 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7545 LOGD("hdr_scene_data: %d %f\n",
7546 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7547 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7548 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7549 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7550 &isHdr, 1);
7551 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7552 &isHdrConfidence, 1);
7553 }
7554
7555
7556
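    // The tuning metadata blob packed below starts with a six-entry uint32 header
    // (data version and the sensor/VFE/CPP/CAC/mod3 payload sizes), followed by the
    // sensor, VFE, CPP and CAC payloads, each clamped to its corresponding *_MAX limit.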
Thierry Strudel3d639192016-09-09 11:52:26 -07007557 if (metadata->is_tuning_params_valid) {
7558 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7559 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7560 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7561
7562
7563 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7564 sizeof(uint32_t));
7565 data += sizeof(uint32_t);
7566
7567 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7568 sizeof(uint32_t));
7569 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7570 data += sizeof(uint32_t);
7571
7572 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7573 sizeof(uint32_t));
7574 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7575 data += sizeof(uint32_t);
7576
7577 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7578 sizeof(uint32_t));
7579 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7580 data += sizeof(uint32_t);
7581
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7583 sizeof(uint32_t));
7584 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7585 data += sizeof(uint32_t);
7586
7587 metadata->tuning_params.tuning_mod3_data_size = 0;
7588 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7589 sizeof(uint32_t));
7590 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7591 data += sizeof(uint32_t);
7592
7593 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7594 TUNING_SENSOR_DATA_MAX);
7595 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7596 count);
7597 data += count;
7598
7599 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7600 TUNING_VFE_DATA_MAX);
7601 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7602 count);
7603 data += count;
7604
7605 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7606 TUNING_CPP_DATA_MAX);
7607 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7608 count);
7609 data += count;
7610
7611 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7612 TUNING_CAC_DATA_MAX);
7613 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7614 count);
7615 data += count;
7616
7617 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7618 (int32_t *)(void *)tuning_meta_data_blob,
7619 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7620 }
7621
7622 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7623 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7624 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7625 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7626 NEUTRAL_COL_POINTS);
7627 }
7628
7629 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7630 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7631 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7632 }
7633
7634 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7635 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7636 // Adjust crop region from sensor output coordinate system to active
7637 // array coordinate system.
7638 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7639 hAeRegions->rect.width, hAeRegions->rect.height);
7640
7641 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7642 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7643 REGIONS_TUPLE_COUNT);
7644 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7645 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7646 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7647 hAeRegions->rect.height);
7648 }
7649
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007650 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7651 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7652 if (NAME_NOT_FOUND != val) {
7653 uint8_t fwkAfMode = (uint8_t)val;
7654 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7655 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7656 } else {
7657 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7658 val);
7659 }
7660 }
7661
Thierry Strudel3d639192016-09-09 11:52:26 -07007662 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7663 uint8_t fwk_afState = (uint8_t) *afState;
7664 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007665 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007666 }
7667
7668 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7669 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7670 }
7671
7672 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7673 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7674 }
7675
7676 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7677 uint8_t fwk_lensState = *lensState;
7678 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7679 }
7680
Thierry Strudel3d639192016-09-09 11:52:26 -07007681
7682 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007683 uint32_t ab_mode = *hal_ab_mode;
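    // The framework antibanding enum only defines OFF/50HZ/60HZ/AUTO, so the HAL-internal
    // AUTO_50HZ and AUTO_60HZ refinements are reported back as plain AUTO.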
7684 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7685 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7686 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7687 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007688 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007689 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007690 if (NAME_NOT_FOUND != val) {
7691 uint8_t fwk_ab_mode = (uint8_t)val;
7692 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7693 }
7694 }
7695
7696 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7697 int val = lookupFwkName(SCENE_MODES_MAP,
7698 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7699 if (NAME_NOT_FOUND != val) {
7700 uint8_t fwkBestshotMode = (uint8_t)val;
7701 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7702 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7703 } else {
7704 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7705 }
7706 }
7707
7708 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7709 uint8_t fwk_mode = (uint8_t) *mode;
7710 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7711 }
7712
7713 /* Constant metadata values to be updated */
7714 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7715 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7716
7717 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7718 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7719
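    // The hot pixel map is published with zero entries: the tag must still be present for
    // consumers even though the map mode above is hardcoded to OFF (assumption based on the
    // 0-count update below).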
7720 int32_t hotPixelMap[2];
7721 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7722
7723 // CDS
7724 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7725 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7726 }
7727
Thierry Strudel04e026f2016-10-10 11:27:36 -07007728 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7729 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007730 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007731 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7732 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7733 } else {
7734 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7735 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007736
7737 if(fwk_hdr != curr_hdr_state) {
7738 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7739 if(fwk_hdr)
7740 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7741 else
7742 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7743 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007744 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7745 }
7746
Thierry Strudel54dc9782017-02-15 12:12:10 -08007747 //binning correction
7748 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7749 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7750 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7751 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7752 }
7753
Thierry Strudel04e026f2016-10-10 11:27:36 -07007754 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007755 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007756 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7757 int8_t is_ir_on = 0;
7758
7759 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7760 if(is_ir_on != curr_ir_state) {
7761 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7762 if(is_ir_on)
7763 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7764 else
7765 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7766 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007767 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007768 }
7769
Thierry Strudel269c81a2016-10-12 12:13:59 -07007770 // AEC SPEED
7771 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7772 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7773 }
7774
7775 // AWB SPEED
7776 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7777 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7778 }
7779
Thierry Strudel3d639192016-09-09 11:52:26 -07007780 // TNR
7781 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7782 uint8_t tnr_enable = tnr->denoise_enable;
7783 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007784 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7785 int8_t is_tnr_on = 0;
7786
7787 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7788 if(is_tnr_on != curr_tnr_state) {
7789 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7790 if(is_tnr_on)
7791 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7792 else
7793 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007795
7796 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7797 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7798 }
7799
7800 // Reprocess crop data
7801 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7802 uint8_t cnt = crop_data->num_of_streams;
7803 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7804 // mm-qcamera-daemon only posts crop_data for streams
7805 // not linked to pproc. So no valid crop metadata is not
7806 // necessarily an error case.
7807 LOGD("No valid crop metadata entries");
7808 } else {
7809 uint32_t reproc_stream_id;
7810 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7811 LOGD("No reprocessible stream found, ignore crop data");
7812 } else {
7813 int rc = NO_ERROR;
7814 Vector<int32_t> roi_map;
7815 int32_t *crop = new int32_t[cnt*4];
7816 if (NULL == crop) {
7817 rc = NO_MEMORY;
7818 }
7819 if (NO_ERROR == rc) {
7820 int32_t streams_found = 0;
7821 for (size_t i = 0; i < cnt; i++) {
7822 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7823 if (pprocDone) {
7824 // HAL already does internal reprocessing,
7825 // either via reprocessing before JPEG encoding,
7826 // or offline postprocessing for pproc bypass case.
7827 crop[0] = 0;
7828 crop[1] = 0;
7829 crop[2] = mInputStreamInfo.dim.width;
7830 crop[3] = mInputStreamInfo.dim.height;
7831 } else {
7832 crop[0] = crop_data->crop_info[i].crop.left;
7833 crop[1] = crop_data->crop_info[i].crop.top;
7834 crop[2] = crop_data->crop_info[i].crop.width;
7835 crop[3] = crop_data->crop_info[i].crop.height;
7836 }
7837 roi_map.add(crop_data->crop_info[i].roi_map.left);
7838 roi_map.add(crop_data->crop_info[i].roi_map.top);
7839 roi_map.add(crop_data->crop_info[i].roi_map.width);
7840 roi_map.add(crop_data->crop_info[i].roi_map.height);
7841 streams_found++;
7842 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7843 crop[0], crop[1], crop[2], crop[3]);
7844 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7845 crop_data->crop_info[i].roi_map.left,
7846 crop_data->crop_info[i].roi_map.top,
7847 crop_data->crop_info[i].roi_map.width,
7848 crop_data->crop_info[i].roi_map.height);
7849 break;
7850
7851 }
7852 }
7853 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7854 &streams_found, 1);
7855 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7856 crop, (size_t)(streams_found * 4));
7857 if (roi_map.array()) {
7858 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7859 roi_map.array(), roi_map.size());
7860 }
7861 }
7862 if (crop) {
7863 delete [] crop;
7864 }
7865 }
7866 }
7867 }
7868
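    /* Illustrative sketch (not part of the HAL, values hypothetical): the
     * reprocess crop vendor tags populated above are flat int32 arrays with
     * four entries per matched stream, ordered [left, top, width, height].
     * With result being a hypothetical CameraMetadata wrapping the returned
     * result, a single-stream entry could be read back roughly like this:
     *
     *   camera_metadata_entry_t cnt  = result.find(QCAMERA3_CROP_COUNT_REPROCESS);
     *   camera_metadata_entry_t crop = result.find(QCAMERA3_CROP_REPROCESS);
     *   if ((cnt.count == 1) && (cnt.data.i32[0] == 1) && (crop.count >= 4)) {
     *       int32_t left   = crop.data.i32[0];
     *       int32_t top    = crop.data.i32[1];
     *       int32_t width  = crop.data.i32[2];
     *       int32_t height = crop.data.i32[3];
     *       // QCAMERA3_CROP_ROI_MAP_REPROCESS carries the matching ROI map quadruple.
     *   }
     */
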
7869 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7870        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7871        // non-NULL, so hardcode the CAC result to OFF mode.
7872 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7873 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7874 } else {
7875 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7876 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7877 *cacMode);
7878 if (NAME_NOT_FOUND != val) {
7879 uint8_t resultCacMode = (uint8_t)val;
7880 // check whether CAC result from CB is equal to Framework set CAC mode
7881                // Check whether the CAC result from the callback matches the framework-set
7882                // CAC mode; if not, report the CAC mode that came in the corresponding request.
7883 resultCacMode = fwk_cacMode;
7884 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007885 //Check if CAC is disabled by property
7886 if (m_cacModeDisabled) {
7887 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7888 }
7889
Thierry Strudel3d639192016-09-09 11:52:26 -07007890 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7891 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7892 } else {
7893 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7894 }
7895 }
7896 }
7897
7898 // Post blob of cam_cds_data through vendor tag.
7899 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7900 uint8_t cnt = cdsInfo->num_of_streams;
7901 cam_cds_data_t cdsDataOverride;
7902 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7903 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7904 cdsDataOverride.num_of_streams = 1;
7905 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7906 uint32_t reproc_stream_id;
7907 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7908 LOGD("No reprocessible stream found, ignore cds data");
7909 } else {
7910 for (size_t i = 0; i < cnt; i++) {
7911 if (cdsInfo->cds_info[i].stream_id ==
7912 reproc_stream_id) {
7913 cdsDataOverride.cds_info[0].cds_enable =
7914 cdsInfo->cds_info[i].cds_enable;
7915 break;
7916 }
7917 }
7918 }
7919 } else {
7920 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7921 }
7922 camMetadata.update(QCAMERA3_CDS_INFO,
7923 (uint8_t *)&cdsDataOverride,
7924 sizeof(cam_cds_data_t));
7925 }
7926
7927 // Ldaf calibration data
7928 if (!mLdafCalibExist) {
7929 IF_META_AVAILABLE(uint32_t, ldafCalib,
7930 CAM_INTF_META_LDAF_EXIF, metadata) {
7931 mLdafCalibExist = true;
7932 mLdafCalib[0] = ldafCalib[0];
7933 mLdafCalib[1] = ldafCalib[1];
7934 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7935 ldafCalib[0], ldafCalib[1]);
7936 }
7937 }
7938
Thierry Strudel54dc9782017-02-15 12:12:10 -08007939 // EXIF debug data through vendor tag
7940 /*
7941 * Mobicat Mask can assume 3 values:
7942 * 1 refers to Mobicat data,
7943 * 2 refers to Stats Debug and Exif Debug Data
7944 * 3 refers to Mobicat and Stats Debug Data
7945 * We want to make sure that we are sending Exif debug data
7946 * only when Mobicat Mask is 2.
7947 */
7948 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7949 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7950 (uint8_t *)(void *)mExifParams.debug_params,
7951 sizeof(mm_jpeg_debug_exif_params_t));
7952 }
7953
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007954 // Reprocess and DDM debug data through vendor tag
7955 cam_reprocess_info_t repro_info;
7956 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007957 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7958 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007959 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007960 }
7961 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7962 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007963 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007964 }
7965 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7966 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007967 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007968 }
7969 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7970 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007971 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007972 }
7973 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7974 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007975 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 }
7977 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007978 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007979 }
7980 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7981 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007983 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007984 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7985 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7986 }
7987 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7988 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7989 }
7990 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7991 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007992
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007993 // INSTANT AEC MODE
7994 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7995 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7996 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7997 }
7998
Shuzhen Wange763e802016-03-31 10:24:29 -07007999 // AF scene change
8000 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8001 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8002 }
8003
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008004 // Enable ZSL
8005 if (enableZsl != nullptr) {
8006 uint8_t value = *enableZsl ?
8007 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8008 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8009 }
8010
Thierry Strudel3d639192016-09-09 11:52:26 -07008011 resultMetadata = camMetadata.release();
8012 return resultMetadata;
8013}
8014
8015/*===========================================================================
8016 * FUNCTION : saveExifParams
8017 *
8018 * DESCRIPTION: save 3A and stats EXIF debug parameters from the metadata callback
8019 *
8020 * PARAMETERS :
8021 * @metadata : metadata information from callback
8022 *
8023 * RETURN : none
8024 *
8025 *==========================================================================*/
8026void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8027{
8028 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8029 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8030 if (mExifParams.debug_params) {
8031 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8032 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8033 }
8034 }
8035 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8036 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8037 if (mExifParams.debug_params) {
8038 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8039 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8040 }
8041 }
8042 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8043 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8044 if (mExifParams.debug_params) {
8045 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8046 mExifParams.debug_params->af_debug_params_valid = TRUE;
8047 }
8048 }
8049 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8050 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8051 if (mExifParams.debug_params) {
8052 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8053 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8054 }
8055 }
8056 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8057 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8058 if (mExifParams.debug_params) {
8059 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8060 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8061 }
8062 }
8063 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8064 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8065 if (mExifParams.debug_params) {
8066 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8067 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8068 }
8069 }
8070 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8071 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8072 if (mExifParams.debug_params) {
8073 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8074 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8075 }
8076 }
8077 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8078 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8079 if (mExifParams.debug_params) {
8080 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8081 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8082 }
8083 }
8084}
8085
8086/*===========================================================================
8087 * FUNCTION : get3AExifParams
8088 *
8089 * DESCRIPTION: return the cached EXIF parameters, including 3A debug data
8090 *
8091 * PARAMETERS : none
8092 *
8093 *
8094 * RETURN : mm_jpeg_exif_params_t
8095 *
8096 *==========================================================================*/
8097mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8098{
8099 return mExifParams;
8100}
8101
8102/*===========================================================================
8103 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8104 *
8105 * DESCRIPTION:
8106 * DESCRIPTION: translate urgent (partial) metadata from the HAL callback into
8107 *              framework result metadata
8107 * PARAMETERS :
8108 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008109 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8110 * urgent metadata in a batch. Always true for
8111 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008112 *
8113 * RETURN : camera_metadata_t*
8114 * metadata in a format specified by fwk
8115 *==========================================================================*/
8116camera_metadata_t*
8117QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008118 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008119{
8120 CameraMetadata camMetadata;
8121 camera_metadata_t *resultMetadata;
8122
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008123 if (!lastUrgentMetadataInBatch) {
8124 /* In batch mode, use empty metadata if this is not the last in batch
8125 */
8126 resultMetadata = allocate_camera_metadata(0, 0);
8127 return resultMetadata;
8128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008129
8130 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8131 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8132 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8133 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8134 }
8135
8136 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8137 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8138 &aecTrigger->trigger, 1);
8139 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8140 &aecTrigger->trigger_id, 1);
8141 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8142 aecTrigger->trigger);
8143 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8144 aecTrigger->trigger_id);
8145 }
8146
8147 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8148 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8149 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8150 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8151 }
8152
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8154 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8155 &af_trigger->trigger, 1);
8156 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8157 af_trigger->trigger);
8158 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8159 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8160 af_trigger->trigger_id);
8161 }
8162
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008163 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8164 /*af regions*/
8165 int32_t afRegions[REGIONS_TUPLE_COUNT];
8166        // Adjust AF region from sensor output coordinate system to active
8167        // array coordinate system.
8168 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8169 hAfRegions->rect.width, hAfRegions->rect.height);
8170
8171 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8172 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8173 REGIONS_TUPLE_COUNT);
8174 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8175 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8176 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8177 hAfRegions->rect.height);
8178 }
8179
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008180 // AF region confidence
8181 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8182 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8183 }
8184
Thierry Strudel3d639192016-09-09 11:52:26 -07008185 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8186 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8187 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8188 if (NAME_NOT_FOUND != val) {
8189 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8190 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8191 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8192 } else {
8193 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8194 }
8195 }
8196
8197 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8198 uint32_t aeMode = CAM_AE_MODE_MAX;
8199 int32_t flashMode = CAM_FLASH_MODE_MAX;
8200 int32_t redeye = -1;
8201 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8202 aeMode = *pAeMode;
8203 }
8204 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8205 flashMode = *pFlashMode;
8206 }
8207 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8208 redeye = *pRedeye;
8209 }
8210
8211 if (1 == redeye) {
8212 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8213 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8214 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8215 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8216 flashMode);
8217 if (NAME_NOT_FOUND != val) {
8218 fwk_aeMode = (uint8_t)val;
8219 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8220 } else {
8221 LOGE("Unsupported flash mode %d", flashMode);
8222 }
8223 } else if (aeMode == CAM_AE_MODE_ON) {
8224 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8225 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8226 } else if (aeMode == CAM_AE_MODE_OFF) {
8227 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8228 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008229 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8230 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8231 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008232 } else {
8233 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8234 "flashMode:%d, aeMode:%u!!!",
8235 redeye, flashMode, aeMode);
8236 }
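
    /* Illustrative sketch (assumption, not part of the build): the AE mode
     * reported above gives red-eye reduction precedence over the flash mode,
     * which in turn takes precedence over the bare AEC mode. Assuming the
     * usual AE_FLASH_MODE_MAP mapping (AUTO -> ON_AUTO_FLASH, ON ->
     * ON_ALWAYS_FLASH) and ignoring the external-flash case, the decision is
     * roughly:
     *
     *   static uint8_t deduceFwkAeMode(int32_t redeye, int32_t flash, uint32_t ae) {
     *       if (redeye == 1)                  return ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
     *       if (flash == CAM_FLASH_MODE_AUTO) return ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
     *       if (flash == CAM_FLASH_MODE_ON)   return ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
     *       if (ae == CAM_AE_MODE_ON)         return ANDROID_CONTROL_AE_MODE_ON;
     *       return ANDROID_CONTROL_AE_MODE_OFF;
     *   }
     */
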
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008237 if (mInstantAEC) {
8238        // Increment the frame index count until a bound is reached for instant AEC.
8239 mInstantAecFrameIdxCount++;
8240 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8241 CAM_INTF_META_AEC_INFO, metadata) {
8242 LOGH("ae_params->settled = %d",ae_params->settled);
8243 // If AEC settled, or if number of frames reached bound value,
8244 // should reset instant AEC.
8245 if (ae_params->settled ||
8246 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8247 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8248 mInstantAEC = false;
8249 mResetInstantAEC = true;
8250 mInstantAecFrameIdxCount = 0;
8251 }
8252 }
8253 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008254 resultMetadata = camMetadata.release();
8255 return resultMetadata;
8256}
8257
8258/*===========================================================================
8259 * FUNCTION : dumpMetadataToFile
8260 *
8261 * DESCRIPTION: Dumps tuning metadata to file system
8262 *
8263 * PARAMETERS :
8264 * @meta : tuning metadata
8265 * @dumpFrameCount : current dump frame count
8266 * @enabled : Enable mask
8267 *
8268 *==========================================================================*/
8269void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8270 uint32_t &dumpFrameCount,
8271 bool enabled,
8272 const char *type,
8273 uint32_t frameNumber)
8274{
8275 //Some sanity checks
8276 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8277 LOGE("Tuning sensor data size bigger than expected %d: %d",
8278 meta.tuning_sensor_data_size,
8279 TUNING_SENSOR_DATA_MAX);
8280 return;
8281 }
8282
8283 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8284 LOGE("Tuning VFE data size bigger than expected %d: %d",
8285 meta.tuning_vfe_data_size,
8286 TUNING_VFE_DATA_MAX);
8287 return;
8288 }
8289
8290 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8291 LOGE("Tuning CPP data size bigger than expected %d: %d",
8292 meta.tuning_cpp_data_size,
8293 TUNING_CPP_DATA_MAX);
8294 return;
8295 }
8296
8297 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8298 LOGE("Tuning CAC data size bigger than expected %d: %d",
8299 meta.tuning_cac_data_size,
8300 TUNING_CAC_DATA_MAX);
8301 return;
8302 }
8303 //
8304
8305 if(enabled){
8306 char timeBuf[FILENAME_MAX];
8307 char buf[FILENAME_MAX];
8308 memset(buf, 0, sizeof(buf));
8309 memset(timeBuf, 0, sizeof(timeBuf));
8310 time_t current_time;
8311 struct tm * timeinfo;
8312 time (&current_time);
8313 timeinfo = localtime (&current_time);
8314 if (timeinfo != NULL) {
8315 strftime (timeBuf, sizeof(timeBuf),
8316 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8317 }
8318 String8 filePath(timeBuf);
8319 snprintf(buf,
8320 sizeof(buf),
8321 "%dm_%s_%d.bin",
8322 dumpFrameCount,
8323 type,
8324 frameNumber);
8325 filePath.append(buf);
8326 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8327 if (file_fd >= 0) {
8328 ssize_t written_len = 0;
8329 meta.tuning_data_version = TUNING_DATA_VERSION;
8330 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8331 written_len += write(file_fd, data, sizeof(uint32_t));
8332 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8333 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8334 written_len += write(file_fd, data, sizeof(uint32_t));
8335 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8336 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8337 written_len += write(file_fd, data, sizeof(uint32_t));
8338 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8339 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8340 written_len += write(file_fd, data, sizeof(uint32_t));
8341 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8342 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8343 written_len += write(file_fd, data, sizeof(uint32_t));
8344 meta.tuning_mod3_data_size = 0;
8345 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8346 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8347 written_len += write(file_fd, data, sizeof(uint32_t));
8348 size_t total_size = meta.tuning_sensor_data_size;
8349 data = (void *)((uint8_t *)&meta.data);
8350 written_len += write(file_fd, data, total_size);
8351 total_size = meta.tuning_vfe_data_size;
8352 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8353 written_len += write(file_fd, data, total_size);
8354 total_size = meta.tuning_cpp_data_size;
8355 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8356 written_len += write(file_fd, data, total_size);
8357 total_size = meta.tuning_cac_data_size;
8358 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8359 written_len += write(file_fd, data, total_size);
8360 close(file_fd);
8361 }else {
8362 LOGE("fail to open file for metadata dumping");
8363 }
8364 }
8365}
8366
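/*===========================================================================
 * Illustrative sketch (not part of the HAL build): dumpMetadataToFile() above
 * writes six uint32 header fields (data version and the sensor/VFE/CPP/CAC/
 * mod3 sizes) followed by the sensor, VFE, CPP and CAC payloads taken from
 * their fixed offsets in meta.data. A minimal host-side reader for that
 * header, with the path name hypothetical, could look like this:
 *
 *   #include <cstdio>
 *   #include <cstdint>
 *
 *   bool readTuningHeader(const char *path, uint32_t hdr[6]) {
 *       FILE *fp = fopen(path, "rb");
 *       if (fp == nullptr) return false;
 *       size_t n = fread(hdr, sizeof(uint32_t), 6, fp);  // version + 5 sizes
 *       fclose(fp);
 *       return (n == 6);                                 // payloads follow the header
 *   }
 *==========================================================================*/
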
8367/*===========================================================================
8368 * FUNCTION : cleanAndSortStreamInfo
8369 *
8370 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8371 *              and sort them such that raw streams are at the end of the list.
8372 *              This is a workaround for a camera daemon constraint.
8373 *
8374 * PARAMETERS : None
8375 *
8376 *==========================================================================*/
8377void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8378{
8379 List<stream_info_t *> newStreamInfo;
8380
8381 /*clean up invalid streams*/
8382 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8383 it != mStreamInfo.end();) {
8384 if(((*it)->status) == INVALID){
8385 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8386 delete channel;
8387 free(*it);
8388 it = mStreamInfo.erase(it);
8389 } else {
8390 it++;
8391 }
8392 }
8393
8394 // Move preview/video/callback/snapshot streams into newList
8395 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8396 it != mStreamInfo.end();) {
8397 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8398 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8399 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8400 newStreamInfo.push_back(*it);
8401 it = mStreamInfo.erase(it);
8402 } else
8403 it++;
8404 }
8405 // Move raw streams into newList
8406 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8407 it != mStreamInfo.end();) {
8408 newStreamInfo.push_back(*it);
8409 it = mStreamInfo.erase(it);
8410 }
8411
8412 mStreamInfo = newStreamInfo;
8413}
8414
8415/*===========================================================================
8416 * FUNCTION : extractJpegMetadata
8417 *
8418 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8419 * JPEG metadata is cached in HAL, and return as part of capture
8420 * result when metadata is returned from camera daemon.
8421 *
8422 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8423 * @request: capture request
8424 *
8425 *==========================================================================*/
8426void QCamera3HardwareInterface::extractJpegMetadata(
8427 CameraMetadata& jpegMetadata,
8428 const camera3_capture_request_t *request)
8429{
8430 CameraMetadata frame_settings;
8431 frame_settings = request->settings;
8432
8433 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8434 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8435 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8436 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8437
8438 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8439 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8440 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8441 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8442
8443 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8444 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8445 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8446 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8447
8448 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8449 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8450 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8451 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8452
8453 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8454 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8455 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8456 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8457
8458 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8459 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8460 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8461 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8462
8463 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8464 int32_t thumbnail_size[2];
8465 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8466 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8467 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8468 int32_t orientation =
8469 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008470 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008471 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8472 int32_t temp;
8473 temp = thumbnail_size[0];
8474 thumbnail_size[0] = thumbnail_size[1];
8475 thumbnail_size[1] = temp;
8476 }
8477 }
8478 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8479 thumbnail_size,
8480 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8481 }
8482
8483}
8484
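/*===========================================================================
 * Illustrative example (values hypothetical): with ANDROID_JPEG_ORIENTATION
 * set to 90 and a requested thumbnail of 320x240, extractJpegMetadata() above
 * reports the thumbnail as 240x320 when the rotation is applied in the stream
 * (needJpegExifRotation() returns false); when the rotation is only recorded
 * in EXIF, the requested 320x240 is kept as-is:
 *
 *   int32_t thumb[2] = {320, 240};
 *   if (!needJpegExifRotation() && ((orientation == 90) || (orientation == 270))) {
 *       std::swap(thumb[0], thumb[1]);   // -> {240, 320}
 *   }
 *==========================================================================*/
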
8485/*===========================================================================
8486 * FUNCTION : convertToRegions
8487 *
8488 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8489 *
8490 * PARAMETERS :
8491 * @rect : cam_rect_t struct to convert
8492 * @region : int32_t destination array
8493 * @weight : if we are converting from cam_area_t, weight is valid
8494 * else weight = -1
8495 *
8496 *==========================================================================*/
8497void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8498 int32_t *region, int weight)
8499{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008500 region[FACE_LEFT] = rect.left;
8501 region[FACE_TOP] = rect.top;
8502 region[FACE_RIGHT] = rect.left + rect.width;
8503 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008504 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008505 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008506 }
8507}
8508
8509/*===========================================================================
8510 * FUNCTION : convertFromRegions
8511 *
8512 * DESCRIPTION: helper method to convert a framework region tuple into cam_area_t
8513 *
8514 * PARAMETERS :
8515 *   @roi            : destination cam_area_t struct
8516 *   @frame_settings : framework request settings containing the region entry
8517 *   @tag            : metadata tag of the region entry, laid out as
8518 *                     [xmin, ymin, xmax, ymax, weight]
8519 *
8520 *==========================================================================*/
8521void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008522 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008523{
Thierry Strudel3d639192016-09-09 11:52:26 -07008524 int32_t x_min = frame_settings.find(tag).data.i32[0];
8525 int32_t y_min = frame_settings.find(tag).data.i32[1];
8526 int32_t x_max = frame_settings.find(tag).data.i32[2];
8527 int32_t y_max = frame_settings.find(tag).data.i32[3];
8528 roi.weight = frame_settings.find(tag).data.i32[4];
8529 roi.rect.left = x_min;
8530 roi.rect.top = y_min;
8531 roi.rect.width = x_max - x_min;
8532 roi.rect.height = y_max - y_min;
8533}
8534
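/*===========================================================================
 * Illustrative example (values hypothetical, FACE_* assumed to index the
 * tuple in [xmin, ymin, xmax, ymax, weight] order): the framework region
 * tuple used by convertToRegions()/convertFromRegions() above stores corner
 * coordinates, while the HAL keeps {left, top, width, height}. A 100x200 ROI
 * at (10, 20) with weight 1 therefore converts as:
 *
 *   cam_rect_t rect = {10, 20, 100, 200};   // left, top, width, height (order assumed)
 *   int32_t region[5];
 *   convertToRegions(rect, region, 1);      // -> {10, 20, 110, 220, 1}
 *
 * convertFromRegions() reverses this, recovering width = xmax - xmin and
 * height = ymax - ymin from the metadata entry.
 *==========================================================================*/
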
8535/*===========================================================================
8536 * FUNCTION : resetIfNeededROI
8537 *
8538 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8539 * crop region
8540 *
8541 * PARAMETERS :
8542 * @roi : cam_area_t struct to resize
8543 * @scalerCropRegion : cam_crop_region_t region to compare against
8544 *
8545 *
8546 *==========================================================================*/
8547bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8548 const cam_crop_region_t* scalerCropRegion)
8549{
8550 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8551 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8552 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8553 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8554
8555    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8556     * Without this check, the validation below (whether the roi lies inside the
8557     * scaler crop region) would fail, so the roi would not be reset and the
8558     * algorithm would keep using a stale roi window.
8559     */
8560 if (roi->weight == 0) {
8561 return true;
8562 }
8563
8564 if ((roi_x_max < scalerCropRegion->left) ||
8565 // right edge of roi window is left of scalar crop's left edge
8566 (roi_y_max < scalerCropRegion->top) ||
8567 // bottom edge of roi window is above scalar crop's top edge
8568 (roi->rect.left > crop_x_max) ||
8569 // left edge of roi window is beyond(right) of scalar crop's right edge
8570 (roi->rect.top > crop_y_max)){
8571        // top edge of roi window is beyond (below) scaler crop's bottom edge
8572 return false;
8573 }
8574 if (roi->rect.left < scalerCropRegion->left) {
8575 roi->rect.left = scalerCropRegion->left;
8576 }
8577 if (roi->rect.top < scalerCropRegion->top) {
8578 roi->rect.top = scalerCropRegion->top;
8579 }
8580 if (roi_x_max > crop_x_max) {
8581 roi_x_max = crop_x_max;
8582 }
8583 if (roi_y_max > crop_y_max) {
8584 roi_y_max = crop_y_max;
8585 }
8586 roi->rect.width = roi_x_max - roi->rect.left;
8587 roi->rect.height = roi_y_max - roi->rect.top;
8588 return true;
8589}
8590
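/*===========================================================================
 * Worked example (values hypothetical) for resetIfNeededROI() above: with a
 * scaler crop region of (1000, 750, 2000x1500) and an ROI of (500, 500,
 * 3000x2500) with non-zero weight, the ROI overlaps the crop region and is
 * clamped rather than rejected:
 *
 *   cam_area_t roi;
 *   roi.rect   = {500, 500, 3000, 2500};      // left, top, width, height (order assumed)
 *   roi.weight = 1;
 *   cam_crop_region_t crop = {1000, 750, 2000, 1500};
 *   resetIfNeededROI(&roi, &crop);
 *   // roi.rect becomes {1000, 750, 2000, 1500}:
 *   //   left 500 -> 1000, top 500 -> 750 (raised to the crop origin)
 *   //   x_max 3500 -> 3000, y_max 3000 -> 2250 (limited to the crop edges)
 *
 * An ROI with weight == 0 returns true without clamping, and an ROI lying
 * completely outside the crop region returns false.
 *==========================================================================*/
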
8591/*===========================================================================
8592 * FUNCTION : convertLandmarks
8593 *
8594 * DESCRIPTION: helper method to extract the landmarks from face detection info
8595 *
8596 * PARAMETERS :
8597 * @landmark_data : input landmark data to be converted
8598 * @landmarks : int32_t destination array
8599 *
8600 *
8601 *==========================================================================*/
8602void QCamera3HardwareInterface::convertLandmarks(
8603 cam_face_landmarks_info_t landmark_data,
8604 int32_t *landmarks)
8605{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008606 if (landmark_data.is_left_eye_valid) {
8607 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8608 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8609 } else {
8610 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8611 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8612 }
8613
8614 if (landmark_data.is_right_eye_valid) {
8615 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8616 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8617 } else {
8618 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8619 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8620 }
8621
8622 if (landmark_data.is_mouth_valid) {
8623 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8624 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8625 } else {
8626 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8627 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8628 }
8629}
8630
8631/*===========================================================================
8632 * FUNCTION : setInvalidLandmarks
8633 *
8634 * DESCRIPTION: helper method to set invalid landmarks
8635 *
8636 * PARAMETERS :
8637 * @landmarks : int32_t destination array
8638 *
8639 *
8640 *==========================================================================*/
8641void QCamera3HardwareInterface::setInvalidLandmarks(
8642 int32_t *landmarks)
8643{
8644 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8645 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8646 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8647 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8648 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8649 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008650}
8651
8652#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008653
8654/*===========================================================================
8655 * FUNCTION : getCapabilities
8656 *
8657 * DESCRIPTION: query camera capability from back-end
8658 *
8659 * PARAMETERS :
8660 * @ops : mm-interface ops structure
8661 * @cam_handle : camera handle for which we need capability
8662 *
8663 * RETURN : ptr type of capability structure
8664 * capability for success
8665 * NULL for failure
8666 *==========================================================================*/
8667cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8668 uint32_t cam_handle)
8669{
8670 int rc = NO_ERROR;
8671 QCamera3HeapMemory *capabilityHeap = NULL;
8672 cam_capability_t *cap_ptr = NULL;
8673
8674 if (ops == NULL) {
8675 LOGE("Invalid arguments");
8676 return NULL;
8677 }
8678
8679 capabilityHeap = new QCamera3HeapMemory(1);
8680 if (capabilityHeap == NULL) {
8681 LOGE("creation of capabilityHeap failed");
8682 return NULL;
8683 }
8684
8685 /* Allocate memory for capability buffer */
8686 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8687 if(rc != OK) {
8688 LOGE("No memory for cappability");
8689        LOGE("No memory for capability");
8690 }
8691
8692 /* Map memory for capability buffer */
8693 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8694
8695 rc = ops->map_buf(cam_handle,
8696 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8697 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8698 if(rc < 0) {
8699 LOGE("failed to map capability buffer");
8700 rc = FAILED_TRANSACTION;
8701 goto map_failed;
8702 }
8703
8704 /* Query Capability */
8705 rc = ops->query_capability(cam_handle);
8706 if(rc < 0) {
8707 LOGE("failed to query capability");
8708 rc = FAILED_TRANSACTION;
8709 goto query_failed;
8710 }
8711
8712 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8713 if (cap_ptr == NULL) {
8714 LOGE("out of memory");
8715 rc = NO_MEMORY;
8716 goto query_failed;
8717 }
8718
8719 memset(cap_ptr, 0, sizeof(cam_capability_t));
8720 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8721
8722 int index;
8723 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8724 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8725 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8726 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8727 }
8728
8729query_failed:
8730 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8731map_failed:
8732 capabilityHeap->deallocate();
8733allocate_failed:
8734 delete capabilityHeap;
8735
8736 if (rc != NO_ERROR) {
8737 return NULL;
8738 } else {
8739 return cap_ptr;
8740 }
8741}
8742
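/*===========================================================================
 * Illustrative usage sketch (not part of the build): getCapabilities() above
 * maps a shared capability buffer, asks the backend to fill it, copies it
 * into a heap allocation and unmaps the buffer again. The caller therefore
 * owns the returned pointer and must free() it when done, e.g.:
 *
 *   uint32_t handle = get_main_camera_handle(cameraHandle->camera_handle);
 *   cam_capability_t *caps = getCapabilities(cameraHandle->ops, handle);
 *   if (caps != NULL) {
 *       // ... use caps (e.g. stash it in gCamCapability[]) ...
 *       free(caps);   // allocated with malloc() inside getCapabilities()
 *   }
 *==========================================================================*/
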
Thierry Strudel3d639192016-09-09 11:52:26 -07008743/*===========================================================================
8744 * FUNCTION : initCapabilities
8745 *
8746 * DESCRIPTION: initialize camera capabilities in static data struct
8747 *
8748 * PARAMETERS :
8749 * @cameraId : camera Id
8750 *
8751 * RETURN : int32_t type of status
8752 * NO_ERROR -- success
8753 * none-zero failure code
8754 *==========================================================================*/
8755int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8756{
8757 int rc = 0;
8758 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008759 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008760
8761 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8762 if (rc) {
8763 LOGE("camera_open failed. rc = %d", rc);
8764 goto open_failed;
8765 }
8766 if (!cameraHandle) {
8767 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8768 goto open_failed;
8769 }
8770
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008771 handle = get_main_camera_handle(cameraHandle->camera_handle);
8772 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8773 if (gCamCapability[cameraId] == NULL) {
8774 rc = FAILED_TRANSACTION;
8775 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008776 }
8777
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008778 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008779 if (is_dual_camera_by_idx(cameraId)) {
8780 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8781 gCamCapability[cameraId]->aux_cam_cap =
8782 getCapabilities(cameraHandle->ops, handle);
8783 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8784 rc = FAILED_TRANSACTION;
8785 free(gCamCapability[cameraId]);
8786 goto failed_op;
8787 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008788
8789 // Copy the main camera capability to main_cam_cap struct
8790 gCamCapability[cameraId]->main_cam_cap =
8791 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8792 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8793 LOGE("out of memory");
8794 rc = NO_MEMORY;
8795 goto failed_op;
8796 }
8797 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8798 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008799 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008800failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008801 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8802 cameraHandle = NULL;
8803open_failed:
8804 return rc;
8805}
8806
8807/*==========================================================================
8808 * FUNCTION : get3AVersion
8809 *
8810 * DESCRIPTION: get the Q3A S/W version
8811 *
8812 * PARAMETERS :
8813 * @sw_version: Reference of Q3A structure which will hold version info upon
8814 * return
8815 *
8816 * RETURN : None
8817 *
8818 *==========================================================================*/
8819void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8820{
8821 if(gCamCapability[mCameraId])
8822 sw_version = gCamCapability[mCameraId]->q3a_version;
8823 else
8824 LOGE("Capability structure NULL!");
8825}
8826
8827
8828/*===========================================================================
8829 * FUNCTION : initParameters
8830 *
8831 * DESCRIPTION: initialize camera parameters
8832 *
8833 * PARAMETERS :
8834 *
8835 * RETURN : int32_t type of status
8836 * NO_ERROR -- success
8837 * none-zero failure code
8838 *==========================================================================*/
8839int QCamera3HardwareInterface::initParameters()
8840{
8841 int rc = 0;
8842
8843 //Allocate Set Param Buffer
8844 mParamHeap = new QCamera3HeapMemory(1);
8845 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8846 if(rc != OK) {
8847 rc = NO_MEMORY;
8848 LOGE("Failed to allocate SETPARM Heap memory");
8849 delete mParamHeap;
8850 mParamHeap = NULL;
8851 return rc;
8852 }
8853
8854 //Map memory for parameters buffer
8855 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8856 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8857 mParamHeap->getFd(0),
8858 sizeof(metadata_buffer_t),
8859 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8860 if(rc < 0) {
8861 LOGE("failed to map SETPARM buffer");
8862 rc = FAILED_TRANSACTION;
8863 mParamHeap->deallocate();
8864 delete mParamHeap;
8865 mParamHeap = NULL;
8866 return rc;
8867 }
8868
8869 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8870
8871 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8872 return rc;
8873}
8874
8875/*===========================================================================
8876 * FUNCTION : deinitParameters
8877 *
8878 * DESCRIPTION: de-initialize camera parameters
8879 *
8880 * PARAMETERS :
8881 *
8882 * RETURN : NONE
8883 *==========================================================================*/
8884void QCamera3HardwareInterface::deinitParameters()
8885{
8886 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8887 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8888
8889 mParamHeap->deallocate();
8890 delete mParamHeap;
8891 mParamHeap = NULL;
8892
8893 mParameters = NULL;
8894
8895 free(mPrevParameters);
8896 mPrevParameters = NULL;
8897}
8898
8899/*===========================================================================
8900 * FUNCTION : calcMaxJpegSize
8901 *
8902 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8903 *
8904 * PARAMETERS :
8905 *
8906 * RETURN : max_jpeg_size
8907 *==========================================================================*/
8908size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8909{
8910 size_t max_jpeg_size = 0;
8911 size_t temp_width, temp_height;
8912 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8913 MAX_SIZES_CNT);
8914 for (size_t i = 0; i < count; i++) {
8915 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8916 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8917 if (temp_width * temp_height > max_jpeg_size ) {
8918 max_jpeg_size = temp_width * temp_height;
8919 }
8920 }
8921 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8922 return max_jpeg_size;
8923}
8924
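/*===========================================================================
 * Worked example (sensor size hypothetical) for calcMaxJpegSize() above: for
 * a largest picture size of 4000x3000, the buffer is sized at 1.5 bytes per
 * pixel (a 4:2:0-style worst case) plus the transport header appended at the
 * end of the JPEG stream:
 *
 *   size_t maxPixels   = 4000u * 3000u;                      // 12,000,000
 *   size_t maxJpegSize = maxPixels * 3 / 2                    // 18,000,000
 *           + sizeof(camera3_jpeg_blob_t);                    // + blob header
 *==========================================================================*/
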
8925/*===========================================================================
8926 * FUNCTION : getMaxRawSize
8927 *
8928 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8929 *
8930 * PARAMETERS :
8931 *
8932 * RETURN : Largest supported Raw Dimension
8933 *==========================================================================*/
8934cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8935{
8936 int max_width = 0;
8937 cam_dimension_t maxRawSize;
8938
8939 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8940 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8941 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8942 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8943 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8944 }
8945 }
8946 return maxRawSize;
8947}
8948
8949
8950/*===========================================================================
8951 * FUNCTION : calcMaxJpegDim
8952 *
8953 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8954 *
8955 * PARAMETERS :
8956 *
8957 * RETURN : max_jpeg_dim
8958 *==========================================================================*/
8959cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8960{
8961 cam_dimension_t max_jpeg_dim;
8962 cam_dimension_t curr_jpeg_dim;
8963 max_jpeg_dim.width = 0;
8964 max_jpeg_dim.height = 0;
8965 curr_jpeg_dim.width = 0;
8966 curr_jpeg_dim.height = 0;
8967 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8968 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8969 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8970 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8971 max_jpeg_dim.width * max_jpeg_dim.height ) {
8972 max_jpeg_dim.width = curr_jpeg_dim.width;
8973 max_jpeg_dim.height = curr_jpeg_dim.height;
8974 }
8975 }
8976 return max_jpeg_dim;
8977}
8978
8979/*===========================================================================
8980 * FUNCTION : addStreamConfig
8981 *
8982 * DESCRIPTION: adds the stream configuration to the array
8983 *
8984 * PARAMETERS :
8985 * @available_stream_configs : pointer to stream configuration array
8986 * @scalar_format : scalar format
8987 * @dim : configuration dimension
8988 * @config_type : input or output configuration type
8989 *
8990 * RETURN : NONE
8991 *==========================================================================*/
8992void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8993 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8994{
8995 available_stream_configs.add(scalar_format);
8996 available_stream_configs.add(dim.width);
8997 available_stream_configs.add(dim.height);
8998 available_stream_configs.add(config_type);
8999}
9000
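/*===========================================================================
 * Illustrative example (sizes hypothetical): addStreamConfig() above appends
 * one flat [format, width, height, direction] 4-tuple per entry, which is how
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS is encoded. Two output
 * configurations would serialize as eight consecutive int32 values:
 *
 *   Vector<int32_t> configs;
 *   cam_dimension_t previewDim = {1920, 1080};   // width, height (order assumed)
 *   cam_dimension_t jpegDim    = {4000, 3000};
 *   addStreamConfig(configs, ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
 *           previewDim, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *   addStreamConfig(configs, ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
 *           jpegDim, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *   // configs = {fmt, 1920, 1080, dir, fmt, 4000, 3000, dir}
 *==========================================================================*/
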
9001/*===========================================================================
9002 * FUNCTION : supportBurstCapture
9003 *
9004 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9005 *
9006 * PARAMETERS :
9007 * @cameraId : camera Id
9008 *
9009 * RETURN : true if camera supports BURST_CAPTURE
9010 * false otherwise
9011 *==========================================================================*/
9012bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9013{
9014 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9015 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9016 const int32_t highResWidth = 3264;
9017 const int32_t highResHeight = 2448;
9018
9019 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9020 // Maximum resolution images cannot be captured at >= 10fps
9021 // -> not supporting BURST_CAPTURE
9022 return false;
9023 }
9024
9025 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9026 // Maximum resolution images can be captured at >= 20fps
9027 // --> supporting BURST_CAPTURE
9028 return true;
9029 }
9030
9031 // Find the smallest highRes resolution, or largest resolution if there is none
9032 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9033 MAX_SIZES_CNT);
9034 size_t highRes = 0;
9035 while ((highRes + 1 < totalCnt) &&
9036 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9037 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9038 highResWidth * highResHeight)) {
9039 highRes++;
9040 }
9041 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9042 return true;
9043 } else {
9044 return false;
9045 }
9046}
9047
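/*===========================================================================
 * Worked example (durations hypothetical) for supportBurstCapture() above:
 * the bounds translate to frame rates as 50 ms -> 20 fps and 100 ms -> 10 fps.
 * For a sensor whose full-resolution minimum frame duration is 66 ms (~15 fps),
 * neither early return applies, so the smallest picture size that is still at
 * least 3264x2448 (~8 MP) is located and BURST_CAPTURE is advertised only if
 * that size can be captured with a minimum frame duration of 50 ms or less.
 *==========================================================================*/
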
9048/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009049 * FUNCTION : getPDStatIndex
9050 *
9051 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9052 *
9053 * PARAMETERS :
9054 * @caps : camera capabilities
9055 *
9056 * RETURN : int32_t type
9057 * non-negative - on success
9058 * -1 - on failure
9059 *==========================================================================*/
9060int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9061 if (nullptr == caps) {
9062 return -1;
9063 }
9064
9065 uint32_t metaRawCount = caps->meta_raw_channel_count;
9066 int32_t ret = -1;
9067 for (size_t i = 0; i < metaRawCount; i++) {
9068 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9069 ret = i;
9070 break;
9071 }
9072 }
9073
9074 return ret;
9075}
9076
9077/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009078 * FUNCTION : initStaticMetadata
9079 *
9080 * DESCRIPTION: initialize the static metadata
9081 *
9082 * PARAMETERS :
9083 * @cameraId : camera Id
9084 *
9085 * RETURN : int32_t type of status
9086 * 0 -- success
9087 * non-zero failure code
9088 *==========================================================================*/
9089int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9090{
9091 int rc = 0;
9092 CameraMetadata staticInfo;
9093 size_t count = 0;
9094 bool limitedDevice = false;
9095 char prop[PROPERTY_VALUE_MAX];
9096 bool supportBurst = false;
9097
9098 supportBurst = supportBurstCapture(cameraId);
9099
9100 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9101     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9102     * advertised as a limited device */
9103 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9104 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9105 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9106 !supportBurst;
9107
9108 uint8_t supportedHwLvl = limitedDevice ?
9109 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009110#ifndef USE_HAL_3_3
9111 // LEVEL_3 - This device will support level 3.
9112 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9113#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009114 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009115#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009116
9117 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9118 &supportedHwLvl, 1);
9119
9120 bool facingBack = false;
9121 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9122 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9123 facingBack = true;
9124 }
9125 /*HAL 3 only*/
9126 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9127 &gCamCapability[cameraId]->min_focus_distance, 1);
9128
9129 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9130 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9131
9132 /*should be using focal lengths but sensor doesn't provide that info now*/
9133 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9134 &gCamCapability[cameraId]->focal_length,
9135 1);
9136
9137 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9138 gCamCapability[cameraId]->apertures,
9139 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9140
9141 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9142 gCamCapability[cameraId]->filter_densities,
9143 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9144
9145
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009146 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9147 size_t mode_count =
9148 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9149 for (size_t i = 0; i < mode_count; i++) {
9150 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9151 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009152 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009153 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009154
9155 int32_t lens_shading_map_size[] = {
9156 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9157 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9158 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9159 lens_shading_map_size,
9160 sizeof(lens_shading_map_size)/sizeof(int32_t));
9161
9162 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9163 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9164
9165 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9166 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9167
9168 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9169 &gCamCapability[cameraId]->max_frame_duration, 1);
9170
9171 camera_metadata_rational baseGainFactor = {
9172 gCamCapability[cameraId]->base_gain_factor.numerator,
9173 gCamCapability[cameraId]->base_gain_factor.denominator};
9174 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9175 &baseGainFactor, 1);
9176
9177 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9178 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9179
9180 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9181 gCamCapability[cameraId]->pixel_array_size.height};
9182 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9183 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9184
9185 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9186 gCamCapability[cameraId]->active_array_size.top,
9187 gCamCapability[cameraId]->active_array_size.width,
9188 gCamCapability[cameraId]->active_array_size.height};
9189 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9190 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9191
9192 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9193 &gCamCapability[cameraId]->white_level, 1);
9194
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009195 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9196 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9197 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009198 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009199 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009200
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009201#ifndef USE_HAL_3_3
9202 bool hasBlackRegions = false;
9203 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9204 LOGW("black_region_count: %d is bounded to %d",
9205 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9206 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9207 }
9208 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9209 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9210 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9211 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9212 }
9213 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9214 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9215 hasBlackRegions = true;
9216 }
9217#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009218 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9219 &gCamCapability[cameraId]->flash_charge_duration, 1);
9220
9221 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9222 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9223
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009224 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9225 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9226 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009227 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9228 &timestampSource, 1);
9229
Thierry Strudel54dc9782017-02-15 12:12:10 -08009230 //update histogram vendor data
9231 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009232 &gCamCapability[cameraId]->histogram_size, 1);
9233
Thierry Strudel54dc9782017-02-15 12:12:10 -08009234 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009235 &gCamCapability[cameraId]->max_histogram_count, 1);
9236
Shuzhen Wang14415f52016-11-16 18:26:18 -08009237 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9238    //so that the app can request fewer bins than the maximum supported.
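    // For example, if max_histogram_count were 256 and MIN_CAM_HISTOGRAM_STATS_SIZE
    // were 32 (illustrative values), the advertised bin counts would be {256, 128, 64, 32}.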
9239 std::vector<int32_t> histBins;
9240 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9241 histBins.push_back(maxHistBins);
9242 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9243 (maxHistBins & 0x1) == 0) {
9244 histBins.push_back(maxHistBins >> 1);
9245 maxHistBins >>= 1;
9246 }
9247 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9248 histBins.data(), histBins.size());
9249
Thierry Strudel3d639192016-09-09 11:52:26 -07009250 int32_t sharpness_map_size[] = {
9251 gCamCapability[cameraId]->sharpness_map_size.width,
9252 gCamCapability[cameraId]->sharpness_map_size.height};
9253
9254 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9255 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9256
9257 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9258 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9259
Emilian Peev0f3c3162017-03-15 12:57:46 +00009260 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9261 if (0 <= indexPD) {
9262 // Advertise PD stats data as part of the Depth capabilities
9263 int32_t depthWidth =
9264 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9265 int32_t depthHeight =
9266 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
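        // Rough maximum sample count derived from the PD stats buffer dimensions;
        // the (width * height * 2) / 16 factor presumably mirrors how the sensor
        // packs PD data into the stats plane.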
9267 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9268 assert(0 < depthSamplesCount);
9269 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9270 &depthSamplesCount, 1);
9271
9272 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9273 depthHeight,
9274 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9275 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9276 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9277 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9278 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9279
9280 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9281 depthHeight, 33333333,
9282 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9283 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9284 depthMinDuration,
9285 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9286
9287 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9288 depthHeight, 0,
9289 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9290 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9291 depthStallDuration,
9292 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9293
9294 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9295 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9296 }
9297
Thierry Strudel3d639192016-09-09 11:52:26 -07009298 int32_t scalar_formats[] = {
9299 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9300 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9301 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9302 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9303 HAL_PIXEL_FORMAT_RAW10,
9304 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009305 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9306 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9307 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009308
9309 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9310 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9311 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9312 count, MAX_SIZES_CNT, available_processed_sizes);
9313 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9314 available_processed_sizes, count * 2);
9315
9316 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9317 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9318 makeTable(gCamCapability[cameraId]->raw_dim,
9319 count, MAX_SIZES_CNT, available_raw_sizes);
9320 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9321 available_raw_sizes, count * 2);
9322
9323 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9324 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9325 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9326 count, MAX_SIZES_CNT, available_fps_ranges);
9327 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9328 available_fps_ranges, count * 2);
9329
9330 camera_metadata_rational exposureCompensationStep = {
9331 gCamCapability[cameraId]->exp_compensation_step.numerator,
9332 gCamCapability[cameraId]->exp_compensation_step.denominator};
9333 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9334 &exposureCompensationStep, 1);
9335
9336 Vector<uint8_t> availableVstabModes;
9337 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9338 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009339 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009340 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009341 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009342 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009343 count = IS_TYPE_MAX;
9344 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9345 for (size_t i = 0; i < count; i++) {
9346 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9347 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9348 eisSupported = true;
9349 break;
9350 }
9351 }
9352 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009353 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9354 }
9355 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9356 availableVstabModes.array(), availableVstabModes.size());
9357
9358 /*HAL 1 and HAL 3 common*/
9359 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9360 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9361 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009362 // Cap the max zoom to the max preferred value
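    // Note: maxZoomStep / minZoomStep is integer (uint32_t) division, so the zoom
    // ratio is truncated to a whole number before being capped and stored as float.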
9363 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009364 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9365 &maxZoom, 1);
9366
9367 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9368 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9369
9370 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9371 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9372 max3aRegions[2] = 0; /* AF not supported */
9373 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9374 max3aRegions, 3);
9375
9376 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9377 memset(prop, 0, sizeof(prop));
9378 property_get("persist.camera.facedetect", prop, "1");
9379 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9380 LOGD("Support face detection mode: %d",
9381 supportedFaceDetectMode);
9382
9383 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009384    /* supported mode should be OFF if the max number of faces is 0 */
9385 if (maxFaces <= 0) {
9386 supportedFaceDetectMode = 0;
9387 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009388 Vector<uint8_t> availableFaceDetectModes;
9389 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9390 if (supportedFaceDetectMode == 1) {
9391 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9392 } else if (supportedFaceDetectMode == 2) {
9393 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9394 } else if (supportedFaceDetectMode == 3) {
9395 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9396 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9397 } else {
9398 maxFaces = 0;
9399 }
9400 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9401 availableFaceDetectModes.array(),
9402 availableFaceDetectModes.size());
9403 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9404 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009405 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9406 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9407 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009408
9409 int32_t exposureCompensationRange[] = {
9410 gCamCapability[cameraId]->exposure_compensation_min,
9411 gCamCapability[cameraId]->exposure_compensation_max};
9412 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9413 exposureCompensationRange,
9414 sizeof(exposureCompensationRange)/sizeof(int32_t));
9415
9416 uint8_t lensFacing = (facingBack) ?
9417 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9418 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9419
9420 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9421 available_thumbnail_sizes,
9422 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9423
9424 /*all sizes will be clubbed into this tag*/
9425 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9426 /*android.scaler.availableStreamConfigurations*/
9427 Vector<int32_t> available_stream_configs;
9428 cam_dimension_t active_array_dim;
9429 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9430 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009431
9432    /*Advertise the list of supported input dimensions based on the property below.
9433    By default all sizes up to 5MP will be advertised.
9434    Note that the setprop resolution format should be WxH.
9435    e.g: adb shell setprop persist.camera.input.minsize 1280x720
9436    To list all supported sizes, the setprop needs to be set to "0x0" */
9437 cam_dimension_t minInputSize = {2592,1944}; //5MP
9438 memset(prop, 0, sizeof(prop));
9439 property_get("persist.camera.input.minsize", prop, "2592x1944");
9440 if (strlen(prop) > 0) {
9441 char *saveptr = NULL;
9442 char *token = strtok_r(prop, "x", &saveptr);
9443 if (token != NULL) {
9444 minInputSize.width = atoi(token);
9445 }
9446 token = strtok_r(NULL, "x", &saveptr);
9447 if (token != NULL) {
9448 minInputSize.height = atoi(token);
9449 }
9450 }
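    // For example, "1280x720" parses to minInputSize = {1280, 720}; setting the
    // property to "0x0" makes every picture size qualify as an input size below.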
9451
Thierry Strudel3d639192016-09-09 11:52:26 -07009452 /* Add input/output stream configurations for each scalar formats*/
9453 for (size_t j = 0; j < scalar_formats_count; j++) {
9454 switch (scalar_formats[j]) {
9455 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9456 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9457 case HAL_PIXEL_FORMAT_RAW10:
9458 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9459 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9460 addStreamConfig(available_stream_configs, scalar_formats[j],
9461 gCamCapability[cameraId]->raw_dim[i],
9462 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9463 }
9464 break;
9465 case HAL_PIXEL_FORMAT_BLOB:
9466 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9467 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9468 addStreamConfig(available_stream_configs, scalar_formats[j],
9469 gCamCapability[cameraId]->picture_sizes_tbl[i],
9470 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9471 }
9472 break;
9473 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9474 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9475 default:
9476 cam_dimension_t largest_picture_size;
9477 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9478 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9479 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9480 addStreamConfig(available_stream_configs, scalar_formats[j],
9481 gCamCapability[cameraId]->picture_sizes_tbl[i],
9482 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009483                /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009484 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9485 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009486 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9487 >= minInputSize.width) || (gCamCapability[cameraId]->
9488 picture_sizes_tbl[i].height >= minInputSize.height)) {
9489 addStreamConfig(available_stream_configs, scalar_formats[j],
9490 gCamCapability[cameraId]->picture_sizes_tbl[i],
9491 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9492 }
9493 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009494 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009495
Thierry Strudel3d639192016-09-09 11:52:26 -07009496 break;
9497 }
9498 }
9499
9500 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9501 available_stream_configs.array(), available_stream_configs.size());
9502 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9503 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9504
9505 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9506 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9507
9508 /* android.scaler.availableMinFrameDurations */
9509 Vector<int64_t> available_min_durations;
9510 for (size_t j = 0; j < scalar_formats_count; j++) {
9511 switch (scalar_formats[j]) {
9512 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9513 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9514 case HAL_PIXEL_FORMAT_RAW10:
9515 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9516 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9517 available_min_durations.add(scalar_formats[j]);
9518 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9519 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9520 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9521 }
9522 break;
9523 default:
9524 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9525 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9526 available_min_durations.add(scalar_formats[j]);
9527 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9528 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9529 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9530 }
9531 break;
9532 }
9533 }
9534 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9535 available_min_durations.array(), available_min_durations.size());
9536
9537 Vector<int32_t> available_hfr_configs;
9538 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9539 int32_t fps = 0;
9540 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9541 case CAM_HFR_MODE_60FPS:
9542 fps = 60;
9543 break;
9544 case CAM_HFR_MODE_90FPS:
9545 fps = 90;
9546 break;
9547 case CAM_HFR_MODE_120FPS:
9548 fps = 120;
9549 break;
9550 case CAM_HFR_MODE_150FPS:
9551 fps = 150;
9552 break;
9553 case CAM_HFR_MODE_180FPS:
9554 fps = 180;
9555 break;
9556 case CAM_HFR_MODE_210FPS:
9557 fps = 210;
9558 break;
9559 case CAM_HFR_MODE_240FPS:
9560 fps = 240;
9561 break;
9562 case CAM_HFR_MODE_480FPS:
9563 fps = 480;
9564 break;
9565 case CAM_HFR_MODE_OFF:
9566 case CAM_HFR_MODE_MAX:
9567 default:
9568 break;
9569 }
9570
9571 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9572 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9573            /* For each HFR frame rate, we need to advertise one variable fps range
9574             * and one fixed fps range per dimension. E.g. for 120 FPS, advertise [30, 120]
9575             * and [120, 120]. While camcorder preview alone is running, [30, 120] is
9576             * set by the app. When video recording starts, [120, 120] is
9577             * set. This way the sensor configuration does not change when recording
9578             * starts. */
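            // Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30): a 1920x1080
            // 120fps HFR entry yields (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4).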
9579
9580 /* (width, height, fps_min, fps_max, batch_size_max) */
9581 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9582 j < MAX_SIZES_CNT; j++) {
9583 available_hfr_configs.add(
9584 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9585 available_hfr_configs.add(
9586 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9587 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9588 available_hfr_configs.add(fps);
9589 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9590
9591 /* (width, height, fps_min, fps_max, batch_size_max) */
9592 available_hfr_configs.add(
9593 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9594 available_hfr_configs.add(
9595 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9596 available_hfr_configs.add(fps);
9597 available_hfr_configs.add(fps);
9598 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9599 }
9600 }
9601 }
9602 //Advertise HFR capability only if the property is set
9603 memset(prop, 0, sizeof(prop));
9604 property_get("persist.camera.hal3hfr.enable", prop, "1");
9605 uint8_t hfrEnable = (uint8_t)atoi(prop);
9606
9607 if(hfrEnable && available_hfr_configs.array()) {
9608 staticInfo.update(
9609 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9610 available_hfr_configs.array(), available_hfr_configs.size());
9611 }
9612
9613 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9614 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9615 &max_jpeg_size, 1);
9616
9617 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9618 size_t size = 0;
9619 count = CAM_EFFECT_MODE_MAX;
9620 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9621 for (size_t i = 0; i < count; i++) {
9622 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9623 gCamCapability[cameraId]->supported_effects[i]);
9624 if (NAME_NOT_FOUND != val) {
9625 avail_effects[size] = (uint8_t)val;
9626 size++;
9627 }
9628 }
9629 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9630 avail_effects,
9631 size);
9632
9633 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9634 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9635 size_t supported_scene_modes_cnt = 0;
9636 count = CAM_SCENE_MODE_MAX;
9637 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9638 for (size_t i = 0; i < count; i++) {
9639 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9640 CAM_SCENE_MODE_OFF) {
9641 int val = lookupFwkName(SCENE_MODES_MAP,
9642 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9643 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009644
Thierry Strudel3d639192016-09-09 11:52:26 -07009645 if (NAME_NOT_FOUND != val) {
9646 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9647 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9648 supported_scene_modes_cnt++;
9649 }
9650 }
9651 }
9652 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9653 avail_scene_modes,
9654 supported_scene_modes_cnt);
9655
9656 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9657 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9658 supported_scene_modes_cnt,
9659 CAM_SCENE_MODE_MAX,
9660 scene_mode_overrides,
9661 supported_indexes,
9662 cameraId);
9663
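    // If no scene modes were reported, fall back to a single DISABLED entry so the
    // scene mode tags below are never published empty.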
9664 if (supported_scene_modes_cnt == 0) {
9665 supported_scene_modes_cnt = 1;
9666 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9667 }
9668
9669 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9670 scene_mode_overrides, supported_scene_modes_cnt * 3);
9671
9672 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9673 ANDROID_CONTROL_MODE_AUTO,
9674 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9675 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9676 available_control_modes,
9677 3);
9678
9679 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9680 size = 0;
9681 count = CAM_ANTIBANDING_MODE_MAX;
9682 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9683 for (size_t i = 0; i < count; i++) {
9684 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9685 gCamCapability[cameraId]->supported_antibandings[i]);
9686 if (NAME_NOT_FOUND != val) {
9687 avail_antibanding_modes[size] = (uint8_t)val;
9688 size++;
9689 }
9690
9691 }
9692 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9693 avail_antibanding_modes,
9694 size);
9695
9696 uint8_t avail_abberation_modes[] = {
9697 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9698 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9699 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9700 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9701 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9702 if (0 == count) {
9703        // If no aberration correction modes are available for a device, advertise only the OFF mode
9704 size = 1;
9705 } else {
9706        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9707        // so advertise all 3 modes if at least any one mode is supported, as per the
9708        // new M requirement
9709 size = 3;
9710 }
9711 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9712 avail_abberation_modes,
9713 size);
9714
9715 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9716 size = 0;
9717 count = CAM_FOCUS_MODE_MAX;
9718 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9719 for (size_t i = 0; i < count; i++) {
9720 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9721 gCamCapability[cameraId]->supported_focus_modes[i]);
9722 if (NAME_NOT_FOUND != val) {
9723 avail_af_modes[size] = (uint8_t)val;
9724 size++;
9725 }
9726 }
9727 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9728 avail_af_modes,
9729 size);
9730
9731 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9732 size = 0;
9733 count = CAM_WB_MODE_MAX;
9734 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9735 for (size_t i = 0; i < count; i++) {
9736 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9737 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9738 gCamCapability[cameraId]->supported_white_balances[i]);
9739 if (NAME_NOT_FOUND != val) {
9740 avail_awb_modes[size] = (uint8_t)val;
9741 size++;
9742 }
9743 }
9744 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9745 avail_awb_modes,
9746 size);
9747
9748 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9749 count = CAM_FLASH_FIRING_LEVEL_MAX;
9750 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9751 count);
9752 for (size_t i = 0; i < count; i++) {
9753 available_flash_levels[i] =
9754 gCamCapability[cameraId]->supported_firing_levels[i];
9755 }
9756 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9757 available_flash_levels, count);
9758
9759 uint8_t flashAvailable;
9760 if (gCamCapability[cameraId]->flash_available)
9761 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9762 else
9763 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9764 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9765 &flashAvailable, 1);
9766
9767 Vector<uint8_t> avail_ae_modes;
9768 count = CAM_AE_MODE_MAX;
9769 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9770 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009771 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9772 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9773 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9774 }
9775 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009776 }
9777 if (flashAvailable) {
9778 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9779 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9780 }
9781 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9782 avail_ae_modes.array(),
9783 avail_ae_modes.size());
9784
9785 int32_t sensitivity_range[2];
9786 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9787 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9788 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9789 sensitivity_range,
9790 sizeof(sensitivity_range) / sizeof(int32_t));
9791
9792 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9793 &gCamCapability[cameraId]->max_analog_sensitivity,
9794 1);
9795
9796 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9797 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9798 &sensor_orientation,
9799 1);
9800
9801 int32_t max_output_streams[] = {
9802 MAX_STALLING_STREAMS,
9803 MAX_PROCESSED_STREAMS,
9804 MAX_RAW_STREAMS};
9805 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9806 max_output_streams,
9807 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9808
9809 uint8_t avail_leds = 0;
9810 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9811 &avail_leds, 0);
9812
9813 uint8_t focus_dist_calibrated;
9814 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9815 gCamCapability[cameraId]->focus_dist_calibrated);
9816 if (NAME_NOT_FOUND != val) {
9817 focus_dist_calibrated = (uint8_t)val;
9818 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9819 &focus_dist_calibrated, 1);
9820 }
9821
9822 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9823 size = 0;
9824 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9825 MAX_TEST_PATTERN_CNT);
9826 for (size_t i = 0; i < count; i++) {
9827 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9828 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9829 if (NAME_NOT_FOUND != testpatternMode) {
9830 avail_testpattern_modes[size] = testpatternMode;
9831 size++;
9832 }
9833 }
9834 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9835 avail_testpattern_modes,
9836 size);
9837
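    // Max pipeline depth advertised to the framework: in-flight requests plus the
    // empty-pipeline and frame-skip delays.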
9838 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9839 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9840 &max_pipeline_depth,
9841 1);
9842
9843 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9844 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9845 &partial_result_count,
9846 1);
9847
9848 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9849 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9850
9851 Vector<uint8_t> available_capabilities;
9852 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9853 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9854 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9855 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9856 if (supportBurst) {
9857 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9858 }
9859 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9860 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9861 if (hfrEnable && available_hfr_configs.array()) {
9862 available_capabilities.add(
9863 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9864 }
9865
9866 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9867 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9868 }
9869 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9870 available_capabilities.array(),
9871 available_capabilities.size());
9872
9873    //aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9874 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9875 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9876 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9877
9878 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9879 &aeLockAvailable, 1);
9880
9881    //awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
9882 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9883 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9884 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9885
9886 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9887 &awbLockAvailable, 1);
9888
9889 int32_t max_input_streams = 1;
9890 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9891 &max_input_streams,
9892 1);
9893
9894 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
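    // i.e. an IMPLEMENTATION_DEFINED input can be reprocessed into BLOB or
    // YCbCr_420_888, and a YCbCr_420_888 input into the same two output formats.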
9895 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9896 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9897 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9898 HAL_PIXEL_FORMAT_YCbCr_420_888};
9899 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9900 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9901
9902 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9903 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9904 &max_latency,
9905 1);
9906
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009907#ifndef USE_HAL_3_3
9908 int32_t isp_sensitivity_range[2];
9909 isp_sensitivity_range[0] =
9910 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9911 isp_sensitivity_range[1] =
9912 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9913 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9914 isp_sensitivity_range,
9915 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9916#endif
9917
Thierry Strudel3d639192016-09-09 11:52:26 -07009918 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9919 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9920 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9921 available_hot_pixel_modes,
9922 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9923
9924 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9925 ANDROID_SHADING_MODE_FAST,
9926 ANDROID_SHADING_MODE_HIGH_QUALITY};
9927 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9928 available_shading_modes,
9929 3);
9930
9931 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9932 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9933 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9934 available_lens_shading_map_modes,
9935 2);
9936
9937 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9938 ANDROID_EDGE_MODE_FAST,
9939 ANDROID_EDGE_MODE_HIGH_QUALITY,
9940 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9941 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9942 available_edge_modes,
9943 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9944
9945 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9946 ANDROID_NOISE_REDUCTION_MODE_FAST,
9947 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9948 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9949 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9950 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9951 available_noise_red_modes,
9952 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9953
9954 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9955 ANDROID_TONEMAP_MODE_FAST,
9956 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9957 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9958 available_tonemap_modes,
9959 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9960
9961 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9962 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9963 available_hot_pixel_map_modes,
9964 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9965
9966 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9967 gCamCapability[cameraId]->reference_illuminant1);
9968 if (NAME_NOT_FOUND != val) {
9969 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9970 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9971 }
9972
9973 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9974 gCamCapability[cameraId]->reference_illuminant2);
9975 if (NAME_NOT_FOUND != val) {
9976 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9977 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9978 }
9979
9980 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9981 (void *)gCamCapability[cameraId]->forward_matrix1,
9982 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9983
9984 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9985 (void *)gCamCapability[cameraId]->forward_matrix2,
9986 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9987
9988 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9989 (void *)gCamCapability[cameraId]->color_transform1,
9990 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9991
9992 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9993 (void *)gCamCapability[cameraId]->color_transform2,
9994 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9995
9996 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9997 (void *)gCamCapability[cameraId]->calibration_transform1,
9998 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9999
10000 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10001 (void *)gCamCapability[cameraId]->calibration_transform2,
10002 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10003
10004 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10005 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10006 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10007 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10008 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10009 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10010 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10011 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10012 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10013 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10014 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10015 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10016 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10017 ANDROID_JPEG_GPS_COORDINATES,
10018 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10019 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10020 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10021 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10022 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10023 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10024 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10025 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10026 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10027 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010028#ifndef USE_HAL_3_3
10029 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10030#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010031 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010032 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010033 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10034 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010035 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010036 /* DevCamDebug metadata request_keys_basic */
10037 DEVCAMDEBUG_META_ENABLE,
10038 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010039 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010040 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010041 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010042 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010043 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010044
10045 size_t request_keys_cnt =
10046 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10047 Vector<int32_t> available_request_keys;
10048 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10049 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10050 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10051 }
10052
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010053 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010054 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010055 }
10056
Thierry Strudel3d639192016-09-09 11:52:26 -070010057 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10058 available_request_keys.array(), available_request_keys.size());
10059
10060 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10061 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10062 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10063 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10064 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10065 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10066 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10067 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10068 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10069 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10070 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10071 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10072 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10073 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10074 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10075 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10076 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010077 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010078 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10079 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10080 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010081 ANDROID_STATISTICS_FACE_SCORES,
10082#ifndef USE_HAL_3_3
10083 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10084#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010085 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010086 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010087 // DevCamDebug metadata result_keys_basic
10088 DEVCAMDEBUG_META_ENABLE,
10089 // DevCamDebug metadata result_keys AF
10090 DEVCAMDEBUG_AF_LENS_POSITION,
10091 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10092 DEVCAMDEBUG_AF_TOF_DISTANCE,
10093 DEVCAMDEBUG_AF_LUMA,
10094 DEVCAMDEBUG_AF_HAF_STATE,
10095 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10096 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10097 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10098 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10099 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10100 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10101 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10102 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10103 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10104 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10105 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10106 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10107 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10108 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10109 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10110 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10111 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10112 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10113 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10114 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10115 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10116 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10117 // DevCamDebug metadata result_keys AEC
10118 DEVCAMDEBUG_AEC_TARGET_LUMA,
10119 DEVCAMDEBUG_AEC_COMP_LUMA,
10120 DEVCAMDEBUG_AEC_AVG_LUMA,
10121 DEVCAMDEBUG_AEC_CUR_LUMA,
10122 DEVCAMDEBUG_AEC_LINECOUNT,
10123 DEVCAMDEBUG_AEC_REAL_GAIN,
10124 DEVCAMDEBUG_AEC_EXP_INDEX,
10125 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010126 // DevCamDebug metadata result_keys zzHDR
10127 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10128 DEVCAMDEBUG_AEC_L_LINECOUNT,
10129 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10130 DEVCAMDEBUG_AEC_S_LINECOUNT,
10131 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10132 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10133 // DevCamDebug metadata result_keys ADRC
10134 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10135 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10136 DEVCAMDEBUG_AEC_GTM_RATIO,
10137 DEVCAMDEBUG_AEC_LTM_RATIO,
10138 DEVCAMDEBUG_AEC_LA_RATIO,
10139 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010140 // DevCamDebug metadata result_keys AWB
10141 DEVCAMDEBUG_AWB_R_GAIN,
10142 DEVCAMDEBUG_AWB_G_GAIN,
10143 DEVCAMDEBUG_AWB_B_GAIN,
10144 DEVCAMDEBUG_AWB_CCT,
10145 DEVCAMDEBUG_AWB_DECISION,
10146 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010147 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10148 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10149 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010150 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010151 };
10152
Thierry Strudel3d639192016-09-09 11:52:26 -070010153 size_t result_keys_cnt =
10154 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10155
10156 Vector<int32_t> available_result_keys;
10157 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10158 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10159 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10160 }
10161 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10162 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10163 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10164 }
10165 if (supportedFaceDetectMode == 1) {
10166 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10167 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10168 } else if ((supportedFaceDetectMode == 2) ||
10169 (supportedFaceDetectMode == 3)) {
10170 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10171 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10172 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010173#ifndef USE_HAL_3_3
10174 if (hasBlackRegions) {
10175 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10176 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10177 }
10178#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010179
10180 if (gExposeEnableZslKey) {
10181 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10182 }
10183
Thierry Strudel3d639192016-09-09 11:52:26 -070010184 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10185 available_result_keys.array(), available_result_keys.size());
10186
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010187 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010188 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10189 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10190 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10191 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10192 ANDROID_SCALER_CROPPING_TYPE,
10193 ANDROID_SYNC_MAX_LATENCY,
10194 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10195 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10196 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10197 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10198 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10199 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10200 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10201 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10202 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10203 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10204 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10205 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10206 ANDROID_LENS_FACING,
10207 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10208 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10209 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10210 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10211 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10212 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10213 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10214 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10215 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10216 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10217 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10218 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10219 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10220 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10221 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10222 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10223 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10224 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10225 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10226 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010227 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010228 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10229 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10230 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10231 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10232 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10233 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10234 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10235 ANDROID_CONTROL_AVAILABLE_MODES,
10236 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10237 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10238 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10239 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010240 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10241#ifndef USE_HAL_3_3
10242 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10243 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10244#endif
10245 };
10246
10247 Vector<int32_t> available_characteristics_keys;
10248 available_characteristics_keys.appendArray(characteristics_keys_basic,
10249 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10250#ifndef USE_HAL_3_3
10251 if (hasBlackRegions) {
10252 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10253 }
10254#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010255
10256 if (0 <= indexPD) {
10257 int32_t depthKeys[] = {
10258 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10259 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10260 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10261 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10262 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10263 };
10264 available_characteristics_keys.appendArray(depthKeys,
10265 sizeof(depthKeys) / sizeof(depthKeys[0]));
10266 }
10267
Thierry Strudel3d639192016-09-09 11:52:26 -070010268 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010269 available_characteristics_keys.array(),
10270 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010271
10272    /*Available stall durations depend on the HW + SW and will differ between devices */
10273    /*have to add for RAW after implementation*/
10274 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10275 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10276
10277 Vector<int64_t> available_stall_durations;
10278 for (uint32_t j = 0; j < stall_formats_count; j++) {
10279 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10280 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10281 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10282 available_stall_durations.add(stall_formats[j]);
10283 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10284 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10285 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10286 }
10287 } else {
10288 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10289 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10290 available_stall_durations.add(stall_formats[j]);
10291 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10292 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10293 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10294 }
10295 }
10296 }
10297 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10298 available_stall_durations.array(),
10299 available_stall_durations.size());
10300
10301 //QCAMERA3_OPAQUE_RAW
10302 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10303 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10304 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10305 case LEGACY_RAW:
10306 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10307 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10308 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10309 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10310 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10311 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10312 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10313 break;
10314 case MIPI_RAW:
10315 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10316 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10317 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10318 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10319 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10320 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10321 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10322 break;
10323 default:
10324 LOGE("unknown opaque_raw_format %d",
10325 gCamCapability[cameraId]->opaque_raw_fmt);
10326 break;
10327 }
10328 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10329
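    // Each advertised entry below is a (width, height, stride) triple, with the
    // stride taken from the first plane of the computed raw buffer layout.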
10330 Vector<int32_t> strides;
10331 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10332 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10333 cam_stream_buf_plane_info_t buf_planes;
10334 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10335 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10336 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10337 &gCamCapability[cameraId]->padding_info, &buf_planes);
10338 strides.add(buf_planes.plane_info.mp[0].stride);
10339 }
10340 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10341 strides.size());
10342
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010343 //TBD: remove the following line once backend advertises zzHDR in feature mask
10344 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010345 //Video HDR default
10346 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10347 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010348 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010349 int32_t vhdr_mode[] = {
10350 QCAMERA3_VIDEO_HDR_MODE_OFF,
10351 QCAMERA3_VIDEO_HDR_MODE_ON};
10352
10353 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10354 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10355 vhdr_mode, vhdr_mode_count);
10356 }
10357
Thierry Strudel3d639192016-09-09 11:52:26 -070010358 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10359 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10360 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10361
10362 uint8_t isMonoOnly =
10363 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10364 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10365 &isMonoOnly, 1);
10366
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010367#ifndef USE_HAL_3_3
10368 Vector<int32_t> opaque_size;
10369 for (size_t j = 0; j < scalar_formats_count; j++) {
10370 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10371 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10372 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10373 cam_stream_buf_plane_info_t buf_planes;
10374
10375 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10376 &gCamCapability[cameraId]->padding_info, &buf_planes);
10377
10378 if (rc == 0) {
10379 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10380 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10381 opaque_size.add(buf_planes.plane_info.frame_len);
10382                    } else {
10383 LOGE("raw frame calculation failed!");
10384 }
10385 }
10386 }
10387 }
10388
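    // Publish ANDROID_SENSOR_OPAQUE_RAW_SIZE only when the list is a whole number of
    // (width, height, frame_len) triples.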
10389 if ((opaque_size.size() > 0) &&
10390 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10391 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10392 else
10393 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10394#endif
10395
Thierry Strudel04e026f2016-10-10 11:27:36 -070010396 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10397 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10398 size = 0;
10399 count = CAM_IR_MODE_MAX;
10400 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10401 for (size_t i = 0; i < count; i++) {
10402 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10403 gCamCapability[cameraId]->supported_ir_modes[i]);
10404 if (NAME_NOT_FOUND != val) {
10405 avail_ir_modes[size] = (int32_t)val;
10406 size++;
10407 }
10408 }
10409 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10410 avail_ir_modes, size);
10411 }
10412
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010413 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10414 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10415 size = 0;
10416 count = CAM_AEC_CONVERGENCE_MAX;
10417 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10418 for (size_t i = 0; i < count; i++) {
10419 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10420 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10421 if (NAME_NOT_FOUND != val) {
10422 available_instant_aec_modes[size] = (int32_t)val;
10423 size++;
10424 }
10425 }
10426 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10427 available_instant_aec_modes, size);
10428 }
10429
Thierry Strudel54dc9782017-02-15 12:12:10 -080010430 int32_t sharpness_range[] = {
10431 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10432 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10433 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10434
10435 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10436 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10437 size = 0;
10438 count = CAM_BINNING_CORRECTION_MODE_MAX;
10439 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10440 for (size_t i = 0; i < count; i++) {
10441 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10442 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10443 gCamCapability[cameraId]->supported_binning_modes[i]);
10444 if (NAME_NOT_FOUND != val) {
10445 avail_binning_modes[size] = (int32_t)val;
10446 size++;
10447 }
10448 }
10449 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10450 avail_binning_modes, size);
10451 }
10452
10453 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10454 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10455 size = 0;
10456 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10457 for (size_t i = 0; i < count; i++) {
10458 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10459 gCamCapability[cameraId]->supported_aec_modes[i]);
10460 if (NAME_NOT_FOUND != val)
10461 available_aec_modes[size++] = val;
10462 }
10463 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10464 available_aec_modes, size);
10465 }
10466
10467 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10468 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10469 size = 0;
10470 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10471 for (size_t i = 0; i < count; i++) {
10472 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10473 gCamCapability[cameraId]->supported_iso_modes[i]);
10474 if (NAME_NOT_FOUND != val)
10475 available_iso_modes[size++] = val;
10476 }
10477 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10478 available_iso_modes, size);
10479 }
10480
10481 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010482 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010483 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10484 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10485 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10486
10487 int32_t available_saturation_range[4];
10488 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10489 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10490 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10491 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10492 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10493 available_saturation_range, 4);
10494
10495 uint8_t is_hdr_values[2];
10496 is_hdr_values[0] = 0;
10497 is_hdr_values[1] = 1;
10498 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10499 is_hdr_values, 2);
10500
10501 float is_hdr_confidence_range[2];
10502 is_hdr_confidence_range[0] = 0.0;
10503 is_hdr_confidence_range[1] = 1.0;
10504 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10505 is_hdr_confidence_range, 2);
10506
Emilian Peev0a972ef2017-03-16 10:25:53 +000010507 size_t eepromLength = strnlen(
10508 reinterpret_cast<const char *>(
10509 gCamCapability[cameraId]->eeprom_version_info),
10510 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10511 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010512 char easelInfo[] = ",E:N";
10513 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10514 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10515 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010516 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10517 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010518 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010519 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10520 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10521 }
10522
Thierry Strudel3d639192016-09-09 11:52:26 -070010523 gStaticMetadata[cameraId] = staticInfo.release();
10524 return rc;
10525}
10526
10527/*===========================================================================
10528 * FUNCTION : makeTable
10529 *
10530 * DESCRIPTION: make a table of sizes as (width, height) integer pairs
10531 *
10532 * PARAMETERS : @dimTable : input dimensions; @size : entry count;
10533 *              @max_size : table capacity;
10534 *              @sizeTable : output array receiving the width/height pairs
10535 *==========================================================================*/
10536void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10537 size_t max_size, int32_t *sizeTable)
10538{
10539 size_t j = 0;
10540 if (size > max_size) {
10541 size = max_size;
10542 }
10543 for (size_t i = 0; i < size; i++) {
10544 sizeTable[j] = dimTable[i].width;
10545 sizeTable[j+1] = dimTable[i].height;
10546 j+=2;
10547 }
10548}
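// Illustrative note (not part of the original HAL sources): makeTable flattens the
// dimension structs into the interleaved int32 layout the framework expects. With a
// hypothetical input of {4032x3024, 1920x1080} and max_size >= 2, sizeTable becomes
// {4032, 3024, 1920, 1080}.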
10549
10550/*===========================================================================
10551 * FUNCTION : makeFPSTable
10552 *
10553 * DESCRIPTION: make a table of fps ranges as (min_fps, max_fps) integer pairs
10554 *
10555 * PARAMETERS : @fpsTable : input ranges; @size : entry count;
10556 *              @max_size : table capacity; @fpsRangesTable : output min/max pairs
10557 *==========================================================================*/
10558void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10559 size_t max_size, int32_t *fpsRangesTable)
10560{
10561 size_t j = 0;
10562 if (size > max_size) {
10563 size = max_size;
10564 }
10565 for (size_t i = 0; i < size; i++) {
10566 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10567 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10568 j+=2;
10569 }
10570}
10571
10572/*===========================================================================
10573 * FUNCTION : makeOverridesList
10574 *
10575 * DESCRIPTION: make a list of AE/AWB/AF scene mode overrides for the framework
10576 *
10577 * PARAMETERS : @overridesTable : per-scene-mode overrides from the daemon;
10578 *              @size/@max_size : entry count and capacity; @overridesList : output list;
10579 *              @supported_indexes : framework-supported scene modes; @camera_id : camera index
10580 *==========================================================================*/
10581void QCamera3HardwareInterface::makeOverridesList(
10582 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10583 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10584{
10585 /* The daemon gives a list of overrides for all scene modes; however, we
10586 should send the framework only the overrides for the scene modes it
10587 actually supports. */
10588 size_t j = 0;
10589 if (size > max_size) {
10590 size = max_size;
10591 }
10592 size_t focus_count = CAM_FOCUS_MODE_MAX;
10593 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10594 focus_count);
10595 for (size_t i = 0; i < size; i++) {
10596 bool supt = false;
10597 size_t index = supported_indexes[i];
10598 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10599 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10600 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10601 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10602 overridesTable[index].awb_mode);
10603 if (NAME_NOT_FOUND != val) {
10604 overridesList[j+1] = (uint8_t)val;
10605 }
10606 uint8_t focus_override = overridesTable[index].af_mode;
10607 for (size_t k = 0; k < focus_count; k++) {
10608 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10609 supt = true;
10610 break;
10611 }
10612 }
10613 if (supt) {
10614 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10615 focus_override);
10616 if (NAME_NOT_FOUND != val) {
10617 overridesList[j+2] = (uint8_t)val;
10618 }
10619 } else {
10620 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10621 }
10622 j+=3;
10623 }
10624}
10625
10626/*===========================================================================
10627 * FUNCTION : filterJpegSizes
10628 *
10629 * DESCRIPTION: return the supported JPEG sizes, i.e. the processed sizes that are
10630 *              at least active_array_size / downscale_factor in each dimension
10631 *
10632 * PARAMETERS : @jpegSizes : output array; @processedSizes/@processedSizesCnt : input
10633 *              sizes; @maxCount : output capacity; @active_array_size/@downscale_factor : filter
10634 * RETURN : length of jpegSizes array
10635 *==========================================================================*/
10636
10637size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10638 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10639 uint8_t downscale_factor)
10640{
10641 if (0 == downscale_factor) {
10642 downscale_factor = 1;
10643 }
10644
10645 int32_t min_width = active_array_size.width / downscale_factor;
10646 int32_t min_height = active_array_size.height / downscale_factor;
10647 size_t jpegSizesCnt = 0;
10648 if (processedSizesCnt > maxCount) {
10649 processedSizesCnt = maxCount;
10650 }
10651 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10652 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10653 jpegSizes[jpegSizesCnt] = processedSizes[i];
10654 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10655 jpegSizesCnt += 2;
10656 }
10657 }
10658 return jpegSizesCnt;
10659}
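// Illustrative example (hypothetical numbers, not from the original sources): with a
// 4000x3000 active array and downscale_factor = 4, min_width/min_height become
// 1000x750, so only processed sizes of at least 1000x750 are kept as JPEG sizes.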
10660
10661/*===========================================================================
10662 * FUNCTION : computeNoiseModelEntryS
10663 *
10664 * DESCRIPTION: function to map a given sensitivity to the S noise
10665 * model parameters in the DNG noise model.
10666 *
10667 * PARAMETERS : sens : the sensor sensitivity
10668 *
10669 * RETURN     : S (sensor amplification) noise
10670 *
10671 *==========================================================================*/
10672double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10673 double s = gCamCapability[mCameraId]->gradient_S * sens +
10674 gCamCapability[mCameraId]->offset_S;
10675 return ((s < 0.0) ? 0.0 : s);
10676}
10677
10678/*===========================================================================
10679 * FUNCTION : computeNoiseModelEntryO
10680 *
10681 * DESCRIPTION: function to map a given sensitivity to the O noise
10682 * model parameters in the DNG noise model.
10683 *
10684 * PARAMETERS : sens : the sensor sensitivity
10685 *
10686 * RETURN     : O (sensor readout) noise
10687 *
10688 *==========================================================================*/
10689double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10690 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10691 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10692 1.0 : (1.0 * sens / max_analog_sens);
10693 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10694 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10695 return ((o < 0.0) ? 0.0 : o);
10696}
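// Usage note (a sketch, not part of the original HAL sources): the S and O values
// computed above feed the DNG noise model entries, where for a normalized pixel value
// x the noise is approximately sqrt(S * x + O). As a hypothetical example, with
// gradient_S = 3.74e-06 and offset_S = 3.23e-04, computeNoiseModelEntryS(100)
// returns about 3.74e-04 + 3.23e-04 = 6.97e-04.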
10697
10698/*===========================================================================
10699 * FUNCTION : getSensorSensitivity
10700 *
10701 * DESCRIPTION: convert iso_mode to an integer value
10702 *
10703 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10704 *
10705 * RETURN     : sensitivity supported by sensor
10706 *
10707 *==========================================================================*/
10708int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10709{
10710 int32_t sensitivity;
10711
10712 switch (iso_mode) {
10713 case CAM_ISO_MODE_100:
10714 sensitivity = 100;
10715 break;
10716 case CAM_ISO_MODE_200:
10717 sensitivity = 200;
10718 break;
10719 case CAM_ISO_MODE_400:
10720 sensitivity = 400;
10721 break;
10722 case CAM_ISO_MODE_800:
10723 sensitivity = 800;
10724 break;
10725 case CAM_ISO_MODE_1600:
10726 sensitivity = 1600;
10727 break;
10728 default:
10729 sensitivity = -1;
10730 break;
10731 }
10732 return sensitivity;
10733}
10734
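/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: if Easel is present on the device, open the Easel manager
 *              client and suspend Easel immediately, honoring the
 *              camera.hdrplus.donotpoweroneasel and persist.camera.hdrplus.*
 *              properties. Must be called with gHdrPlusClientLock held.
 *
 * RETURN     : OK on success; error code from EaselManagerClient otherwise
 *==========================================================================*/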
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010735int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010736 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010737 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10738 // to connect to Easel.
10739 bool doNotpowerOnEasel =
10740 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10741
10742 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010743 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10744 return OK;
10745 }
10746
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010747 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010748 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010749 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010750 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010751 return res;
10752 }
10753
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010754 EaselManagerClientOpened = true;
10755
10756 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010757 if (res != OK) {
10758 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10759 }
10760
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010761 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010762 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010763
10764 // Expose enableZsl key only when HDR+ mode is enabled.
10765 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010766 }
10767
10768 return OK;
10769}
10770
Thierry Strudel3d639192016-09-09 11:52:26 -070010771/*===========================================================================
10772 * FUNCTION : getCamInfo
10773 *
10774 * DESCRIPTION: query camera capabilities
10775 *
10776 * PARAMETERS :
10777 * @cameraId : camera Id
10778 * @info : camera info struct to be filled in with camera capabilities
10779 *
10780 * RETURN : int type of status
10781 * NO_ERROR -- success
10782 * none-zero failure code
10783 *==========================================================================*/
10784int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10785 struct camera_info *info)
10786{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010787 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010788 int rc = 0;
10789
10790 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010791
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010792 {
10793 Mutex::Autolock l(gHdrPlusClientLock);
10794 rc = initHdrPlusClientLocked();
10795 if (rc != OK) {
10796 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10797 pthread_mutex_unlock(&gCamLock);
10798 return rc;
10799 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010800 }
10801
Thierry Strudel3d639192016-09-09 11:52:26 -070010802 if (NULL == gCamCapability[cameraId]) {
10803 rc = initCapabilities(cameraId);
10804 if (rc < 0) {
10805 pthread_mutex_unlock(&gCamLock);
10806 return rc;
10807 }
10808 }
10809
10810 if (NULL == gStaticMetadata[cameraId]) {
10811 rc = initStaticMetadata(cameraId);
10812 if (rc < 0) {
10813 pthread_mutex_unlock(&gCamLock);
10814 return rc;
10815 }
10816 }
10817
10818 switch(gCamCapability[cameraId]->position) {
10819 case CAM_POSITION_BACK:
10820 case CAM_POSITION_BACK_AUX:
10821 info->facing = CAMERA_FACING_BACK;
10822 break;
10823
10824 case CAM_POSITION_FRONT:
10825 case CAM_POSITION_FRONT_AUX:
10826 info->facing = CAMERA_FACING_FRONT;
10827 break;
10828
10829 default:
10830 LOGE("Unknown position type %d for camera id:%d",
10831 gCamCapability[cameraId]->position, cameraId);
10832 rc = -1;
10833 break;
10834 }
10835
10836
10837 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010838#ifndef USE_HAL_3_3
10839 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10840#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010841 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010842#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010843 info->static_camera_characteristics = gStaticMetadata[cameraId];
10844
10845 //For now assume both cameras can operate independently.
10846 info->conflicting_devices = NULL;
10847 info->conflicting_devices_length = 0;
10848
10849 //resource cost is 100 * MIN(1.0, m/M),
10850 //where m is throughput requirement with maximum stream configuration
10851 //and M is CPP maximum throughput.
10852 float max_fps = 0.0;
10853 for (uint32_t i = 0;
10854 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10855 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10856 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10857 }
10858 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10859 gCamCapability[cameraId]->active_array_size.width *
10860 gCamCapability[cameraId]->active_array_size.height * max_fps /
10861 gCamCapability[cameraId]->max_pixel_bandwidth;
10862 info->resource_cost = 100 * MIN(1.0, ratio);
10863 LOGI("camera %d resource cost is %d", cameraId,
10864 info->resource_cost);
10865
10866 pthread_mutex_unlock(&gCamLock);
10867 return rc;
10868}
10869
10870/*===========================================================================
10871 * FUNCTION : translateCapabilityToMetadata
10872 *
10873 * DESCRIPTION: translate the capability into camera_metadata_t
10874 *
10875 * PARAMETERS : type of the request
10876 *
10877 *
10878 * RETURN : success: camera_metadata_t*
10879 * failure: NULL
10880 *
10881 *==========================================================================*/
10882camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10883{
10884 if (mDefaultMetadata[type] != NULL) {
10885 return mDefaultMetadata[type];
10886 }
10887 //first time we are handling this request
10888 //fill up the metadata structure using the wrapper class
10889 CameraMetadata settings;
10890 //translate from cam_capability_t to camera_metadata_tag_t
10891 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10892 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10893 int32_t defaultRequestID = 0;
10894 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10895
10896 /* OIS disable */
10897 char ois_prop[PROPERTY_VALUE_MAX];
10898 memset(ois_prop, 0, sizeof(ois_prop));
10899 property_get("persist.camera.ois.disable", ois_prop, "0");
10900 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10901
10902 /* Force video to use OIS */
10903 char videoOisProp[PROPERTY_VALUE_MAX];
10904 memset(videoOisProp, 0, sizeof(videoOisProp));
10905 property_get("persist.camera.ois.video", videoOisProp, "1");
10906 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010907
10908 // Hybrid AE enable/disable
10909 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10910 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10911 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10912 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10913
Thierry Strudel3d639192016-09-09 11:52:26 -070010914 uint8_t controlIntent = 0;
10915 uint8_t focusMode;
10916 uint8_t vsMode;
10917 uint8_t optStabMode;
10918 uint8_t cacMode;
10919 uint8_t edge_mode;
10920 uint8_t noise_red_mode;
10921 uint8_t tonemap_mode;
10922 bool highQualityModeEntryAvailable = FALSE;
10923 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010924 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010925 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10926 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010927 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010928 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010929 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010930
Thierry Strudel3d639192016-09-09 11:52:26 -070010931 switch (type) {
10932 case CAMERA3_TEMPLATE_PREVIEW:
10933 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10934 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10935 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10936 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10937 edge_mode = ANDROID_EDGE_MODE_FAST;
10938 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10939 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10940 break;
10941 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10942 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10943 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10944 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10945 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10946 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10947 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10948 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10949 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10950 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10951 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10952 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10953 highQualityModeEntryAvailable = TRUE;
10954 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10955 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10956 fastModeEntryAvailable = TRUE;
10957 }
10958 }
10959 if (highQualityModeEntryAvailable) {
10960 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10961 } else if (fastModeEntryAvailable) {
10962 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10963 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010964 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10965 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10966 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010967 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010968 break;
10969 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10970 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10971 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10972 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010973 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10974 edge_mode = ANDROID_EDGE_MODE_FAST;
10975 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10976 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10977 if (forceVideoOis)
10978 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10979 break;
10980 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10981 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10982 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10983 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010984 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10985 edge_mode = ANDROID_EDGE_MODE_FAST;
10986 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10987 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10988 if (forceVideoOis)
10989 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10990 break;
10991 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10992 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10993 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10994 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10995 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10996 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10997 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10998 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10999 break;
11000 case CAMERA3_TEMPLATE_MANUAL:
11001 edge_mode = ANDROID_EDGE_MODE_FAST;
11002 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11003 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11004 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11005 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11006 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11007 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11008 break;
11009 default:
11010 edge_mode = ANDROID_EDGE_MODE_FAST;
11011 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11012 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11013 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11014 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11015 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11016 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11017 break;
11018 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011019 // Set CAC to OFF if the underlying device doesn't support it
11020 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11021 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11022 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011023 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11024 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11025 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11026 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11027 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11028 }
11029 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011030 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011031 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011032
11033 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11034 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11035 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11036 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11037 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11038 || ois_disable)
11039 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11040 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011041 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011042
11043 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11044 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11045
11046 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11047 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11048
11049 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11050 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11051
11052 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11053 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11054
11055 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11056 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11057
11058 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11059 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11060
11061 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11062 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11063
11064 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11065 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11066
11067 /*flash*/
11068 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11069 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11070
11071 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11072 settings.update(ANDROID_FLASH_FIRING_POWER,
11073 &flashFiringLevel, 1);
11074
11075 /* lens */
11076 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11077 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11078
11079 if (gCamCapability[mCameraId]->filter_densities_count) {
11080 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11081 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11082 gCamCapability[mCameraId]->filter_densities_count);
11083 }
11084
11085 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11086 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11087
Thierry Strudel3d639192016-09-09 11:52:26 -070011088 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11089 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11090
11091 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11092 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11093
11094 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11095 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11096
11097 /* face detection (default to OFF) */
11098 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11099 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11100
Thierry Strudel54dc9782017-02-15 12:12:10 -080011101 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11102 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011103
11104 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11105 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11106
11107 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11108 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11109
Thierry Strudel3d639192016-09-09 11:52:26 -070011110
11111 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11112 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11113
11114 /* Exposure time(Update the Min Exposure Time)*/
11115 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11116 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11117
11118 /* frame duration */
11119 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11120 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11121
11122 /* sensitivity */
11123 static const int32_t default_sensitivity = 100;
11124 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011125#ifndef USE_HAL_3_3
11126 static const int32_t default_isp_sensitivity =
11127 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11128 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11129#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011130
11131 /*edge mode*/
11132 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11133
11134 /*noise reduction mode*/
11135 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11136
11137 /*color correction mode*/
11138 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11139 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11140
11141 /*transform matrix mode*/
11142 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11143
11144 int32_t scaler_crop_region[4];
11145 scaler_crop_region[0] = 0;
11146 scaler_crop_region[1] = 0;
11147 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11148 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11149 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11150
11151 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11152 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11153
11154 /*focus distance*/
11155 float focus_distance = 0.0;
11156 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11157
11158 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011159 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011160 float max_range = 0.0;
11161 float max_fixed_fps = 0.0;
11162 int32_t fps_range[2] = {0, 0};
11163 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11164 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011165 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11166 TEMPLATE_MAX_PREVIEW_FPS) {
11167 continue;
11168 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011169 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11170 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11171 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11172 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11173 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11174 if (range > max_range) {
11175 fps_range[0] =
11176 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11177 fps_range[1] =
11178 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11179 max_range = range;
11180 }
11181 } else {
11182 if (range < 0.01 && max_fixed_fps <
11183 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11184 fps_range[0] =
11185 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11186 fps_range[1] =
11187 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11188 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11189 }
11190 }
11191 }
11192 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11193
11194 /*precapture trigger*/
11195 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11196 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11197
11198 /*af trigger*/
11199 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11200 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11201
11202 /* ae & af regions */
11203 int32_t active_region[] = {
11204 gCamCapability[mCameraId]->active_array_size.left,
11205 gCamCapability[mCameraId]->active_array_size.top,
11206 gCamCapability[mCameraId]->active_array_size.left +
11207 gCamCapability[mCameraId]->active_array_size.width,
11208 gCamCapability[mCameraId]->active_array_size.top +
11209 gCamCapability[mCameraId]->active_array_size.height,
11210 0};
11211 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11212 sizeof(active_region) / sizeof(active_region[0]));
11213 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11214 sizeof(active_region) / sizeof(active_region[0]));
11215
11216 /* black level lock */
11217 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11218 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11219
Thierry Strudel3d639192016-09-09 11:52:26 -070011220 //special defaults for manual template
11221 if (type == CAMERA3_TEMPLATE_MANUAL) {
11222 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11223 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11224
11225 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11226 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11227
11228 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11229 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11230
11231 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11232 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11233
11234 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11235 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11236
11237 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11238 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11239 }
11240
11241
11242 /* TNR
11243 * This is where we decide for which templates TNR will be enabled.
11244 * TNR is enabled if either the preview or the video stream requires it.
11245 * This is not to be confused with per-stream linking; that decision is
11246 * still made per session and is handled as part of stream configuration.
11247 */
11248 uint8_t tnr_enable = 0;
11249
11250 if (m_bTnrPreview || m_bTnrVideo) {
11251
11252 switch (type) {
11253 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11254 tnr_enable = 1;
11255 break;
11256
11257 default:
11258 tnr_enable = 0;
11259 break;
11260 }
11261
11262 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11263 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11264 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11265
11266 LOGD("TNR:%d with process plate %d for template:%d",
11267 tnr_enable, tnr_process_type, type);
11268 }
11269
11270 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011271 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011272 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11273
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011274 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011275 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11276
Shuzhen Wang920ea402017-05-03 08:49:39 -070011277 uint8_t related_camera_id = mCameraId;
11278 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011279
11280 /* CDS default */
11281 char prop[PROPERTY_VALUE_MAX];
11282 memset(prop, 0, sizeof(prop));
11283 property_get("persist.camera.CDS", prop, "Auto");
11284 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11285 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11286 if (CAM_CDS_MODE_MAX == cds_mode) {
11287 cds_mode = CAM_CDS_MODE_AUTO;
11288 }
11289
11290 /* Disabling CDS in templates which have TNR enabled*/
11291 if (tnr_enable)
11292 cds_mode = CAM_CDS_MODE_OFF;
11293
11294 int32_t mode = cds_mode;
11295 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011296
Thierry Strudel269c81a2016-10-12 12:13:59 -070011297 /* Manual Convergence AEC Speed is disabled by default*/
11298 float default_aec_speed = 0;
11299 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11300
11301 /* Manual Convergence AWB Speed is disabled by default*/
11302 float default_awb_speed = 0;
11303 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11304
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011305 // Set instant AEC to normal convergence by default
11306 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11307 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11308
Shuzhen Wang19463d72016-03-08 11:09:52 -080011309 /* hybrid ae */
11310 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11311
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011312 if (gExposeEnableZslKey) {
11313 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11314 }
11315
Thierry Strudel3d639192016-09-09 11:52:26 -070011316 mDefaultMetadata[type] = settings.release();
11317
11318 return mDefaultMetadata[type];
11319}
11320
11321/*===========================================================================
11322 * FUNCTION : setFrameParameters
11323 *
11324 * DESCRIPTION: set parameters per frame as requested in the metadata from
11325 * framework
11326 *
11327 * PARAMETERS :
11328 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011329 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011330 * @blob_request: Whether this request is a blob request or not
11331 *
11332 * RETURN : success: NO_ERROR
11333 * failure:
11334 *==========================================================================*/
11335int QCamera3HardwareInterface::setFrameParameters(
11336 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011337 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011338 int blob_request,
11339 uint32_t snapshotStreamId)
11340{
11341 /*translate from camera_metadata_t type to parm_type_t*/
11342 int rc = 0;
11343 int32_t hal_version = CAM_HAL_V3;
11344
11345 clear_metadata_buffer(mParameters);
11346 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11347 LOGE("Failed to set hal version in the parameters");
11348 return BAD_VALUE;
11349 }
11350
11351 /*we need to update the frame number in the parameters*/
11352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11353 request->frame_number)) {
11354 LOGE("Failed to set the frame number in the parameters");
11355 return BAD_VALUE;
11356 }
11357
11358 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011360 LOGE("Failed to set stream type mask in the parameters");
11361 return BAD_VALUE;
11362 }
11363
11364 if (mUpdateDebugLevel) {
11365 uint32_t dummyDebugLevel = 0;
11366 /* The value of dummyDebugLevel is irrelevant. On
11367 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11369 dummyDebugLevel)) {
11370 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11371 return BAD_VALUE;
11372 }
11373 mUpdateDebugLevel = false;
11374 }
11375
11376 if(request->settings != NULL){
11377 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11378 if (blob_request)
11379 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11380 }
11381
11382 return rc;
11383}
11384
11385/*===========================================================================
11386 * FUNCTION : setReprocParameters
11387 *
11388 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11389 * return it.
11390 *
11391 * PARAMETERS :
11392 * @request : request that needs to be serviced
11393 *
11394 * RETURN : success: NO_ERROR
11395 * failure:
11396 *==========================================================================*/
11397int32_t QCamera3HardwareInterface::setReprocParameters(
11398 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11399 uint32_t snapshotStreamId)
11400{
11401 /*translate from camera_metadata_t type to parm_type_t*/
11402 int rc = 0;
11403
11404 if (NULL == request->settings){
11405 LOGE("Reprocess settings cannot be NULL");
11406 return BAD_VALUE;
11407 }
11408
11409 if (NULL == reprocParam) {
11410 LOGE("Invalid reprocessing metadata buffer");
11411 return BAD_VALUE;
11412 }
11413 clear_metadata_buffer(reprocParam);
11414
11415 /*we need to update the frame number in the parameters*/
11416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11417 request->frame_number)) {
11418 LOGE("Failed to set the frame number in the parameters");
11419 return BAD_VALUE;
11420 }
11421
11422 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11423 if (rc < 0) {
11424 LOGE("Failed to translate reproc request");
11425 return rc;
11426 }
11427
11428 CameraMetadata frame_settings;
11429 frame_settings = request->settings;
11430 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11431 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11432 int32_t *crop_count =
11433 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11434 int32_t *crop_data =
11435 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11436 int32_t *roi_map =
11437 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11438 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11439 cam_crop_data_t crop_meta;
11440 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11441 crop_meta.num_of_streams = 1;
11442 crop_meta.crop_info[0].crop.left = crop_data[0];
11443 crop_meta.crop_info[0].crop.top = crop_data[1];
11444 crop_meta.crop_info[0].crop.width = crop_data[2];
11445 crop_meta.crop_info[0].crop.height = crop_data[3];
11446
11447 crop_meta.crop_info[0].roi_map.left =
11448 roi_map[0];
11449 crop_meta.crop_info[0].roi_map.top =
11450 roi_map[1];
11451 crop_meta.crop_info[0].roi_map.width =
11452 roi_map[2];
11453 crop_meta.crop_info[0].roi_map.height =
11454 roi_map[3];
11455
11456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11457 rc = BAD_VALUE;
11458 }
11459 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11460 request->input_buffer->stream,
11461 crop_meta.crop_info[0].crop.left,
11462 crop_meta.crop_info[0].crop.top,
11463 crop_meta.crop_info[0].crop.width,
11464 crop_meta.crop_info[0].crop.height);
11465 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11466 request->input_buffer->stream,
11467 crop_meta.crop_info[0].roi_map.left,
11468 crop_meta.crop_info[0].roi_map.top,
11469 crop_meta.crop_info[0].roi_map.width,
11470 crop_meta.crop_info[0].roi_map.height);
11471 } else {
11472 LOGE("Invalid reprocess crop count %d!", *crop_count);
11473 }
11474 } else {
11475 LOGE("No crop data from matching output stream");
11476 }
11477
11478 /* These settings are not needed for regular requests so handle them specially for
11479 reprocess requests; information needed for EXIF tags */
11480 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11481 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11482 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11483 if (NAME_NOT_FOUND != val) {
11484 uint32_t flashMode = (uint32_t)val;
11485 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11486 rc = BAD_VALUE;
11487 }
11488 } else {
11489 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11490 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11491 }
11492 } else {
11493 LOGH("No flash mode in reprocess settings");
11494 }
11495
11496 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11497 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11499 rc = BAD_VALUE;
11500 }
11501 } else {
11502 LOGH("No flash state in reprocess settings");
11503 }
11504
11505 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11506 uint8_t *reprocessFlags =
11507 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11509 *reprocessFlags)) {
11510 rc = BAD_VALUE;
11511 }
11512 }
11513
Thierry Strudel54dc9782017-02-15 12:12:10 -080011514 // Add exif debug data to internal metadata
11515 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11516 mm_jpeg_debug_exif_params_t *debug_params =
11517 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11518 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11519 // AE
11520 if (debug_params->ae_debug_params_valid == TRUE) {
11521 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11522 debug_params->ae_debug_params);
11523 }
11524 // AWB
11525 if (debug_params->awb_debug_params_valid == TRUE) {
11526 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11527 debug_params->awb_debug_params);
11528 }
11529 // AF
11530 if (debug_params->af_debug_params_valid == TRUE) {
11531 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11532 debug_params->af_debug_params);
11533 }
11534 // ASD
11535 if (debug_params->asd_debug_params_valid == TRUE) {
11536 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11537 debug_params->asd_debug_params);
11538 }
11539 // Stats
11540 if (debug_params->stats_debug_params_valid == TRUE) {
11541 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11542 debug_params->stats_debug_params);
11543 }
11544 // BE Stats
11545 if (debug_params->bestats_debug_params_valid == TRUE) {
11546 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11547 debug_params->bestats_debug_params);
11548 }
11549 // BHIST
11550 if (debug_params->bhist_debug_params_valid == TRUE) {
11551 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11552 debug_params->bhist_debug_params);
11553 }
11554 // 3A Tuning
11555 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11556 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11557 debug_params->q3a_tuning_debug_params);
11558 }
11559 }
11560
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011561 // Add metadata which reprocess needs
11562 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11563 cam_reprocess_info_t *repro_info =
11564 (cam_reprocess_info_t *)frame_settings.find
11565 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011566 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011567 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011568 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011569 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011571 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011573 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011574 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011575 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011576 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011577 repro_info->pipeline_flip);
11578 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11579 repro_info->af_roi);
11580 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11581 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011582 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11583 CAM_INTF_PARM_ROTATION metadata has already been added in
11584 translateToHalMetadata, and the HAL needs to keep this new rotation
11585 metadata. Otherwise, the old rotation info saved in the vendor tag
11586 is used. */
11587 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11588 CAM_INTF_PARM_ROTATION, reprocParam) {
11589 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11590 } else {
11591 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011592 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011593 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011594 }
11595
11596 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11597 to request cropping, and the ROI is used for downscale/upscale during HW JPEG encoding;
11598 roi.width and roi.height become the final JPEG size.
11599 For now, the HAL only checks this for reprocess requests. */
11600 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11601 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11602 uint8_t *enable =
11603 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11604 if (*enable == TRUE) {
11605 int32_t *crop_data =
11606 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11607 cam_stream_crop_info_t crop_meta;
11608 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11609 crop_meta.stream_id = 0;
11610 crop_meta.crop.left = crop_data[0];
11611 crop_meta.crop.top = crop_data[1];
11612 crop_meta.crop.width = crop_data[2];
11613 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011614 // The JPEG crop roi should match cpp output size
11615 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11616 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11617 crop_meta.roi_map.left = 0;
11618 crop_meta.roi_map.top = 0;
11619 crop_meta.roi_map.width = cpp_crop->crop.width;
11620 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011621 }
11622 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11623 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011624 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011625 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011626 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11627 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011628 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011629 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11630
11631 // Add JPEG scale information
11632 cam_dimension_t scale_dim;
11633 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11634 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11635 int32_t *roi =
11636 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11637 scale_dim.width = roi[2];
11638 scale_dim.height = roi[3];
11639 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11640 scale_dim);
11641 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11642 scale_dim.width, scale_dim.height, mCameraId);
11643 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011644 }
11645 }
11646
11647 return rc;
11648}
11649
11650/*===========================================================================
11651 * FUNCTION : saveRequestSettings
11652 *
11653 * DESCRIPTION: Add any settings that might have changed to the request settings
11654 * and save the settings to be applied on the frame
11655 *
11656 * PARAMETERS :
11657 * @jpegMetadata : the extracted and/or modified jpeg metadata
11658 * @request : request with initial settings
11659 *
11660 * RETURN :
11661 * camera_metadata_t* : pointer to the saved request settings
11662 *==========================================================================*/
11663camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11664 const CameraMetadata &jpegMetadata,
11665 camera3_capture_request_t *request)
11666{
11667 camera_metadata_t *resultMetadata;
11668 CameraMetadata camMetadata;
11669 camMetadata = request->settings;
11670
11671 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11672 int32_t thumbnail_size[2];
11673 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11674 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11675 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11676 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11677 }
11678
11679 if (request->input_buffer != NULL) {
11680 uint8_t reprocessFlags = 1;
11681 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11682 (uint8_t*)&reprocessFlags,
11683 sizeof(reprocessFlags));
11684 }
11685
11686 resultMetadata = camMetadata.release();
11687 return resultMetadata;
11688}
11689
11690/*===========================================================================
11691 * FUNCTION : setHalFpsRange
11692 *
11693 * DESCRIPTION: set FPS range parameter
11694 *
11695 *
11696 * PARAMETERS :
11697 * @settings : Metadata from framework
11698 * @hal_metadata: Metadata buffer
11699 *
11700 *
11701 * RETURN : success: NO_ERROR
11702 * failure:
11703 *==========================================================================*/
11704int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11705 metadata_buffer_t *hal_metadata)
11706{
11707 int32_t rc = NO_ERROR;
11708 cam_fps_range_t fps_range;
11709 fps_range.min_fps = (float)
11710 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11711 fps_range.max_fps = (float)
11712 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11713 fps_range.video_min_fps = fps_range.min_fps;
11714 fps_range.video_max_fps = fps_range.max_fps;
11715
11716 LOGD("aeTargetFpsRange fps: [%f %f]",
11717 fps_range.min_fps, fps_range.max_fps);
11718 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11719 * follows:
11720 * ---------------------------------------------------------------|
11721 * Video stream is absent in configure_streams |
11722 * (Camcorder preview before the first video record) |
11723 * ---------------------------------------------------------------|
11724 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11725 * | | | vid_min/max_fps|
11726 * ---------------------------------------------------------------|
11727 * NO | [ 30, 240] | 240 | [240, 240] |
11728 * |-------------|-------------|----------------|
11729 * | [240, 240] | 240 | [240, 240] |
11730 * ---------------------------------------------------------------|
11731 * Video stream is present in configure_streams |
11732 * ---------------------------------------------------------------|
11733 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11734 * | | | vid_min/max_fps|
11735 * ---------------------------------------------------------------|
11736 * NO | [ 30, 240] | 240 | [240, 240] |
11737 * (camcorder prev |-------------|-------------|----------------|
11738 * after video rec | [240, 240] | 240 | [240, 240] |
11739 * is stopped) | | | |
11740 * ---------------------------------------------------------------|
11741 * YES | [ 30, 240] | 240 | [240, 240] |
11742 * |-------------|-------------|----------------|
11743 * | [240, 240] | 240 | [240, 240] |
11744 * ---------------------------------------------------------------|
11745 * When Video stream is absent in configure_streams,
11746 * preview fps = sensor_fps / batchsize
11747 * Eg: for 240fps at batchSize 4, preview = 60fps
11748 * for 120fps at batchSize 4, preview = 30fps
11749 *
11750 * When video stream is present in configure_streams, preview fps is as per
11751 * the ratio of preview buffers to video buffers requested in process
11752 * capture request
11753 */
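    /* Illustrative arithmetic for the batch-size computation below (a sketch;
     * assumes PREVIEW_FPS_FOR_HFR == 30 and MAX_HFR_BATCH_SIZE == 4):
     *   aeTargetFpsRange = [240, 240]  ->  mHFRVideoFps = 240
     *   mBatchSize = 240 / 30 = 8, clamped to MAX_HFR_BATCH_SIZE = 4
     *   preview fps (no video stream) = sensor fps / batch size = 240 / 4 = 60
     */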
11754 mBatchSize = 0;
11755 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11756 fps_range.min_fps = fps_range.video_max_fps;
11757 fps_range.video_min_fps = fps_range.video_max_fps;
11758 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11759 fps_range.max_fps);
11760 if (NAME_NOT_FOUND != val) {
11761 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11763 return BAD_VALUE;
11764 }
11765
11766 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11767 /* If batchmode is currently in progress and the fps changes,
11768 * set the flag to restart the sensor */
11769 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11770 (mHFRVideoFps != fps_range.max_fps)) {
11771 mNeedSensorRestart = true;
11772 }
11773 mHFRVideoFps = fps_range.max_fps;
11774 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11775 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11776 mBatchSize = MAX_HFR_BATCH_SIZE;
11777 }
11778 }
11779 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11780
11781 }
11782 } else {
11783 /* HFR mode is session param in backend/ISP. This should be reset when
11784 * in non-HFR mode */
11785 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11787 return BAD_VALUE;
11788 }
11789 }
11790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11791 return BAD_VALUE;
11792 }
11793 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11794 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11795 return rc;
11796}
11797
11798/*===========================================================================
11799 * FUNCTION : translateToHalMetadata
11800 *
11801 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11802 *
11803 *
11804 * PARAMETERS :
11805 * @request : request sent from framework
11806 *
11807 *
11808 * RETURN : success: NO_ERROR
11809 * failure: BAD_VALUE
11810 *==========================================================================*/
11811int QCamera3HardwareInterface::translateToHalMetadata
11812 (const camera3_capture_request_t *request,
11813 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011814 uint32_t snapshotStreamId) {
11815 if (request == nullptr || hal_metadata == nullptr) {
11816 return BAD_VALUE;
11817 }
11818
11819 int64_t minFrameDuration = getMinFrameDuration(request);
11820
11821 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11822 minFrameDuration);
11823}
11824
11825int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11826 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11827 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11828
Thierry Strudel3d639192016-09-09 11:52:26 -070011829 int rc = 0;
11830 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011831 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011832
11833 /* Do not change the order of the following list unless you know what you are
11834 * doing.
11835 * The order is laid out in such a way that parameters in the front of the table
11836 * may be used to override the parameters later in the table. Examples are:
11837 * 1. META_MODE should precede AEC/AWB/AF MODE
11838 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11839 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11840 * 4. Any mode should precede its corresponding settings
11841 */
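    /* Example of the ordering constraint above (a sketch restating rule 2):
     * ANDROID_CONTROL_AE_MODE is translated earlier in this function than
     * ANDROID_SENSOR_EXPOSURE_TIME, so an AE_MODE_OFF request reaches the
     * backend before the manual exposure time it is meant to enable. */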
11842 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11843 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11844 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11845 rc = BAD_VALUE;
11846 }
11847 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11848 if (rc != NO_ERROR) {
11849 LOGE("extractSceneMode failed");
11850 }
11851 }
11852
11853 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11854 uint8_t fwk_aeMode =
11855 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11856 uint8_t aeMode;
11857 int32_t redeye;
11858
11859 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11860 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011861 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11862 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011863 } else {
11864 aeMode = CAM_AE_MODE_ON;
11865 }
11866 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11867 redeye = 1;
11868 } else {
11869 redeye = 0;
11870 }
11871
11872 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11873 fwk_aeMode);
11874 if (NAME_NOT_FOUND != val) {
11875 int32_t flashMode = (int32_t)val;
11876 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11877 }
11878
11879 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11881 rc = BAD_VALUE;
11882 }
11883 }
11884
11885 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11886 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11887 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11888 fwk_whiteLevel);
11889 if (NAME_NOT_FOUND != val) {
11890 uint8_t whiteLevel = (uint8_t)val;
11891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11892 rc = BAD_VALUE;
11893 }
11894 }
11895 }
11896
11897 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11898 uint8_t fwk_cacMode =
11899 frame_settings.find(
11900 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11901 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11902 fwk_cacMode);
11903 if (NAME_NOT_FOUND != val) {
11904 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11905 bool entryAvailable = FALSE;
11906 // Check whether Frameworks set CAC mode is supported in device or not
11907 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11908 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11909 entryAvailable = TRUE;
11910 break;
11911 }
11912 }
11913 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11914 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.:
11915 // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast (by ISP)
11916 // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
11917 if (entryAvailable == FALSE) {
11918 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11919 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11920 } else {
11921 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11922 // High is not supported, so set FAST, as the spec says the underlying
11923 // device implementation can be the same for both modes.
11924 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11925 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11926 // Fast is not supported, so neither HIGH nor FAST can be used; choose OFF
11927 // to avoid the fps drop that high quality would incur
11928 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11929 } else {
11930 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11931 }
11932 }
11933 }
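            /* Example (sketch): if the framework asks for HIGH_QUALITY but the
             * device only advertises OFF and FAST, the loop above finds no match
             * and cacMode falls back to FAST; if no aberration modes are
             * advertised at all, it falls back to OFF. */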
11934 LOGD("Final cacMode is %d", cacMode);
11935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11936 rc = BAD_VALUE;
11937 }
11938 } else {
11939 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11940 }
11941 }
11942
Thierry Strudel2896d122017-02-23 19:18:03 -080011943 char af_value[PROPERTY_VALUE_MAX];
11944 property_get("persist.camera.af.infinity", af_value, "0");
11945
Jason Lee84ae9972017-02-24 13:24:24 -080011946 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011947 if (atoi(af_value) == 0) {
11948 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011949 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011950 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11951 fwk_focusMode);
11952 if (NAME_NOT_FOUND != val) {
11953 uint8_t focusMode = (uint8_t)val;
11954 LOGD("set focus mode %d", focusMode);
11955 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11956 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11957 rc = BAD_VALUE;
11958 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011959 }
11960 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011961 } else {
11962 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11963 LOGE("Focus forced to infinity %d", focusMode);
11964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11965 rc = BAD_VALUE;
11966 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011967 }
11968
Jason Lee84ae9972017-02-24 13:24:24 -080011969 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11970 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011971 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11973 focalDistance)) {
11974 rc = BAD_VALUE;
11975 }
11976 }
11977
11978 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11979 uint8_t fwk_antibandingMode =
11980 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11981 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11982 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11983 if (NAME_NOT_FOUND != val) {
11984 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011985 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11986 if (m60HzZone) {
11987 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11988 } else {
11989 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11990 }
11991 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011992 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11993 hal_antibandingMode)) {
11994 rc = BAD_VALUE;
11995 }
11996 }
11997 }
11998
11999 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12000 int32_t expCompensation = frame_settings.find(
12001 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12002 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12003 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12004 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12005 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012006 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12008 expCompensation)) {
12009 rc = BAD_VALUE;
12010 }
12011 }
12012
12013 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12014 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12015 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12016 rc = BAD_VALUE;
12017 }
12018 }
12019 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12020 rc = setHalFpsRange(frame_settings, hal_metadata);
12021 if (rc != NO_ERROR) {
12022 LOGE("setHalFpsRange failed");
12023 }
12024 }
12025
12026 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12027 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12029 rc = BAD_VALUE;
12030 }
12031 }
12032
12033 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12034 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12035 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12036 fwk_effectMode);
12037 if (NAME_NOT_FOUND != val) {
12038 uint8_t effectMode = (uint8_t)val;
12039 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12040 rc = BAD_VALUE;
12041 }
12042 }
12043 }
12044
12045 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12046 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12048 colorCorrectMode)) {
12049 rc = BAD_VALUE;
12050 }
12051 }
12052
12053 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12054 cam_color_correct_gains_t colorCorrectGains;
12055 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12056 colorCorrectGains.gains[i] =
12057 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12058 }
12059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12060 colorCorrectGains)) {
12061 rc = BAD_VALUE;
12062 }
12063 }
12064
12065 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12066 cam_color_correct_matrix_t colorCorrectTransform;
12067 cam_rational_type_t transform_elem;
12068 size_t num = 0;
12069 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12070 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12071 transform_elem.numerator =
12072 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12073 transform_elem.denominator =
12074 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12075 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12076 num++;
12077 }
12078 }
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12080 colorCorrectTransform)) {
12081 rc = BAD_VALUE;
12082 }
12083 }
12084
12085 cam_trigger_t aecTrigger;
12086 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12087 aecTrigger.trigger_id = -1;
12088 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12089 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12090 aecTrigger.trigger =
12091 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12092 aecTrigger.trigger_id =
12093 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12095 aecTrigger)) {
12096 rc = BAD_VALUE;
12097 }
12098 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12099 aecTrigger.trigger, aecTrigger.trigger_id);
12100 }
12101
12102 /*af_trigger must come with a trigger id*/
12103 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12104 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12105 cam_trigger_t af_trigger;
12106 af_trigger.trigger =
12107 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12108 af_trigger.trigger_id =
12109 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12111 rc = BAD_VALUE;
12112 }
12113 LOGD("AfTrigger: %d AfTriggerID: %d",
12114 af_trigger.trigger, af_trigger.trigger_id);
12115 }
12116
12117 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12118 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12120 rc = BAD_VALUE;
12121 }
12122 }
12123 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12124 cam_edge_application_t edge_application;
12125 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012126
Thierry Strudel3d639192016-09-09 11:52:26 -070012127 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12128 edge_application.sharpness = 0;
12129 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012130 edge_application.sharpness =
12131 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12132 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12133 int32_t sharpness =
12134 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12135 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12136 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12137 LOGD("Setting edge mode sharpness %d", sharpness);
12138 edge_application.sharpness = sharpness;
12139 }
12140 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012141 }
12142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12143 rc = BAD_VALUE;
12144 }
12145 }
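    /* Sharpness selection above (sketch): EDGE_MODE_OFF forces sharpness 0;
     * otherwise the capability default is used unless the vendor tag
     * QCAMERA3_SHARPNESS_STRENGTH supplies a value inside
     * [sharpness_ctrl.min_value, sharpness_ctrl.max_value]. */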
12146
12147 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12148 int32_t respectFlashMode = 1;
12149 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12150 uint8_t fwk_aeMode =
12151 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012152 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12153 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12154 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012155 respectFlashMode = 0;
12156 LOGH("AE Mode controls flash, ignore android.flash.mode");
12157 }
12158 }
12159 if (respectFlashMode) {
12160 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12161 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12162 LOGH("flash mode after mapping %d", val);
12163 // To check: CAM_INTF_META_FLASH_MODE usage
12164 if (NAME_NOT_FOUND != val) {
12165 uint8_t flashMode = (uint8_t)val;
12166 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12167 rc = BAD_VALUE;
12168 }
12169 }
12170 }
12171 }
12172
12173 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12174 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
12179
12180 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12181 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12183 flashFiringTime)) {
12184 rc = BAD_VALUE;
12185 }
12186 }
12187
12188 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12189 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12191 hotPixelMode)) {
12192 rc = BAD_VALUE;
12193 }
12194 }
12195
12196 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12197 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12199 lensAperture)) {
12200 rc = BAD_VALUE;
12201 }
12202 }
12203
12204 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12205 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12207 filterDensity)) {
12208 rc = BAD_VALUE;
12209 }
12210 }
12211
12212 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12213 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12214 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12215 focalLength)) {
12216 rc = BAD_VALUE;
12217 }
12218 }
12219
12220 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12221 uint8_t optStabMode =
12222 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12224 optStabMode)) {
12225 rc = BAD_VALUE;
12226 }
12227 }
12228
12229 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12230 uint8_t videoStabMode =
12231 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12232 LOGD("videoStabMode from APP = %d", videoStabMode);
12233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12234 videoStabMode)) {
12235 rc = BAD_VALUE;
12236 }
12237 }
12238
12239
12240 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12241 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12243 noiseRedMode)) {
12244 rc = BAD_VALUE;
12245 }
12246 }
12247
12248 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12249 float reprocessEffectiveExposureFactor =
12250 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12252 reprocessEffectiveExposureFactor)) {
12253 rc = BAD_VALUE;
12254 }
12255 }
12256
12257 cam_crop_region_t scalerCropRegion;
12258 bool scalerCropSet = false;
12259 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12260 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12261 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12262 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12263 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12264
12265 // Map coordinate system from active array to sensor output.
12266 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12267 scalerCropRegion.width, scalerCropRegion.height);
12268
12269 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12270 scalerCropRegion)) {
12271 rc = BAD_VALUE;
12272 }
12273 scalerCropSet = true;
12274 }
12275
12276 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12277 int64_t sensorExpTime =
12278 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12279 LOGD("setting sensorExpTime %lld", sensorExpTime);
12280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12281 sensorExpTime)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12287 int64_t sensorFrameDuration =
12288 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012289 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12290 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12291 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12292 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12294 sensorFrameDuration)) {
12295 rc = BAD_VALUE;
12296 }
12297 }
12298
12299 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12300 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12301 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12302 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12303 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12304 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12305 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12307 sensorSensitivity)) {
12308 rc = BAD_VALUE;
12309 }
12310 }
12311
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012312#ifndef USE_HAL_3_3
12313 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12314 int32_t ispSensitivity =
12315 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12316 if (ispSensitivity <
12317 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12318 ispSensitivity =
12319 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12320 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12321 }
12322 if (ispSensitivity >
12323 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12324 ispSensitivity =
12325 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12326 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12327 }
12328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12329 ispSensitivity)) {
12330 rc = BAD_VALUE;
12331 }
12332 }
12333#endif
12334
Thierry Strudel3d639192016-09-09 11:52:26 -070012335 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12336 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12338 rc = BAD_VALUE;
12339 }
12340 }
12341
12342 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12343 uint8_t fwk_facedetectMode =
12344 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12345
12346 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12347 fwk_facedetectMode);
12348
12349 if (NAME_NOT_FOUND != val) {
12350 uint8_t facedetectMode = (uint8_t)val;
12351 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12352 facedetectMode)) {
12353 rc = BAD_VALUE;
12354 }
12355 }
12356 }
12357
Thierry Strudel54dc9782017-02-15 12:12:10 -080012358 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012359 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012360 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012361 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12362 histogramMode)) {
12363 rc = BAD_VALUE;
12364 }
12365 }
12366
12367 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12368 uint8_t sharpnessMapMode =
12369 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12371 sharpnessMapMode)) {
12372 rc = BAD_VALUE;
12373 }
12374 }
12375
12376 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12377 uint8_t tonemapMode =
12378 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12380 rc = BAD_VALUE;
12381 }
12382 }
12383 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12384 /* All tonemap channels will have the same number of points */
12385 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12386 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12387 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12388 cam_rgb_tonemap_curves tonemapCurves;
12389 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12390 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12391 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12392 tonemapCurves.tonemap_points_cnt,
12393 CAM_MAX_TONEMAP_CURVE_SIZE);
12394 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12395 }
12396
12397 /* ch0 = G*/
12398 size_t point = 0;
12399 cam_tonemap_curve_t tonemapCurveGreen;
12400 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12401 for (size_t j = 0; j < 2; j++) {
12402 tonemapCurveGreen.tonemap_points[i][j] =
12403 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12404 point++;
12405 }
12406 }
12407 tonemapCurves.curves[0] = tonemapCurveGreen;
12408
12409 /* ch 1 = B */
12410 point = 0;
12411 cam_tonemap_curve_t tonemapCurveBlue;
12412 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12413 for (size_t j = 0; j < 2; j++) {
12414 tonemapCurveBlue.tonemap_points[i][j] =
12415 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12416 point++;
12417 }
12418 }
12419 tonemapCurves.curves[1] = tonemapCurveBlue;
12420
12421 /* ch 2 = R */
12422 point = 0;
12423 cam_tonemap_curve_t tonemapCurveRed;
12424 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12425 for (size_t j = 0; j < 2; j++) {
12426 tonemapCurveRed.tonemap_points[i][j] =
12427 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12428 point++;
12429 }
12430 }
12431 tonemapCurves.curves[2] = tonemapCurveRed;
12432
12433 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12434 tonemapCurves)) {
12435 rc = BAD_VALUE;
12436 }
12437 }
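    /* Tonemap curve layout assumed above (sketch): each ANDROID_TONEMAP_CURVE_*
     * entry is a flat float array of interleaved (Pin, Pout) pairs, so the
     * point count is entry.count / 2 and tonemap_points[i][0]/[1] hold the
     * i-th input/output pair for that channel. */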
12438
12439 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12440 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12441 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12442 captureIntent)) {
12443 rc = BAD_VALUE;
12444 }
12445 }
12446
12447 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12448 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12450 blackLevelLock)) {
12451 rc = BAD_VALUE;
12452 }
12453 }
12454
12455 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12456 uint8_t lensShadingMapMode =
12457 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12459 lensShadingMapMode)) {
12460 rc = BAD_VALUE;
12461 }
12462 }
12463
12464 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12465 cam_area_t roi;
12466 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012467 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012468
12469 // Map coordinate system from active array to sensor output.
12470 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12471 roi.rect.height);
12472
12473 if (scalerCropSet) {
12474 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12475 }
12476 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12477 rc = BAD_VALUE;
12478 }
12479 }
12480
12481 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12482 cam_area_t roi;
12483 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012484 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012485
12486 // Map coordinate system from active array to sensor output.
12487 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12488 roi.rect.height);
12489
12490 if (scalerCropSet) {
12491 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12492 }
12493 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12494 rc = BAD_VALUE;
12495 }
12496 }
12497
12498 // CDS for non-HFR non-video mode
12499 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12500 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12501 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12502 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12503 LOGE("Invalid CDS mode %d!", *fwk_cds);
12504 } else {
12505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12506 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12507 rc = BAD_VALUE;
12508 }
12509 }
12510 }
12511
Thierry Strudel04e026f2016-10-10 11:27:36 -070012512 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012513 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012514 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012515 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12516 }
12517 if (m_bVideoHdrEnabled)
12518 vhdr = CAM_VIDEO_HDR_MODE_ON;
12519
Thierry Strudel54dc9782017-02-15 12:12:10 -080012520 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12521
12522 if(vhdr != curr_hdr_state)
12523 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12524
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012525 rc = setVideoHdrMode(mParameters, vhdr);
12526 if (rc != NO_ERROR) {
12527 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012528 }
12529
12530 //IR
12531 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12532 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12533 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012534 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12535 uint8_t isIRon = 0;
12536
12537 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012538 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12539 LOGE("Invalid IR mode %d!", fwk_ir);
12540 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012541 if(isIRon != curr_ir_state )
12542 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12543
Thierry Strudel04e026f2016-10-10 11:27:36 -070012544 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12545 CAM_INTF_META_IR_MODE, fwk_ir)) {
12546 rc = BAD_VALUE;
12547 }
12548 }
12549 }
12550
Thierry Strudel54dc9782017-02-15 12:12:10 -080012551 //Binning Correction Mode
12552 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12553 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12554 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12555 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12556 || (0 > fwk_binning_correction)) {
12557 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12558 } else {
12559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12560 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564 }
12565
Thierry Strudel269c81a2016-10-12 12:13:59 -070012566 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12567 float aec_speed;
12568 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12569 LOGD("AEC Speed :%f", aec_speed);
12570 if ( aec_speed < 0 ) {
12571 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12572 } else {
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12574 aec_speed)) {
12575 rc = BAD_VALUE;
12576 }
12577 }
12578 }
12579
12580 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12581 float awb_speed;
12582 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12583 LOGD("AWB Speed :%f", awb_speed);
12584 if ( awb_speed < 0 ) {
12585 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12586 } else {
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12588 awb_speed)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
12592 }
12593
Thierry Strudel3d639192016-09-09 11:52:26 -070012594 // TNR
12595 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12596 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12597 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012598 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012599 cam_denoise_param_t tnr;
12600 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12601 tnr.process_plates =
12602 (cam_denoise_process_type_t)frame_settings.find(
12603 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12604 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012605
12606 if(b_TnrRequested != curr_tnr_state)
12607 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12608
Thierry Strudel3d639192016-09-09 11:52:26 -070012609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12610 rc = BAD_VALUE;
12611 }
12612 }
12613
Thierry Strudel54dc9782017-02-15 12:12:10 -080012614 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012615 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012616 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12618 *exposure_metering_mode)) {
12619 rc = BAD_VALUE;
12620 }
12621 }
12622
Thierry Strudel3d639192016-09-09 11:52:26 -070012623 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12624 int32_t fwk_testPatternMode =
12625 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12626 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12627 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12628
12629 if (NAME_NOT_FOUND != testPatternMode) {
12630 cam_test_pattern_data_t testPatternData;
12631 memset(&testPatternData, 0, sizeof(testPatternData));
12632 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12633 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12634 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12635 int32_t *fwk_testPatternData =
12636 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12637 testPatternData.r = fwk_testPatternData[0];
12638 testPatternData.b = fwk_testPatternData[3];
12639 switch (gCamCapability[mCameraId]->color_arrangement) {
12640 case CAM_FILTER_ARRANGEMENT_RGGB:
12641 case CAM_FILTER_ARRANGEMENT_GRBG:
12642 testPatternData.gr = fwk_testPatternData[1];
12643 testPatternData.gb = fwk_testPatternData[2];
12644 break;
12645 case CAM_FILTER_ARRANGEMENT_GBRG:
12646 case CAM_FILTER_ARRANGEMENT_BGGR:
12647 testPatternData.gr = fwk_testPatternData[2];
12648 testPatternData.gb = fwk_testPatternData[1];
12649 break;
12650 default:
12651 LOGE("color arrangement %d is not supported",
12652 gCamCapability[mCameraId]->color_arrangement);
12653 break;
12654 }
12655 }
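        /* Mapping used above (sketch): ANDROID_SENSOR_TEST_PATTERN_DATA carries
         * four per-channel values; index 0 -> R, index 3 -> B, and indices 1/2
         * are assigned to Gr/Gb (or swapped) according to the sensor's Bayer
         * color arrangement. */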
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12657 testPatternData)) {
12658 rc = BAD_VALUE;
12659 }
12660 } else {
12661 LOGE("Invalid framework sensor test pattern mode %d",
12662 fwk_testPatternMode);
12663 }
12664 }
12665
12666 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12667 size_t count = 0;
12668 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12669 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12670 gps_coords.data.d, gps_coords.count, count);
12671 if (gps_coords.count != count) {
12672 rc = BAD_VALUE;
12673 }
12674 }
12675
12676 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12677 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12678 size_t count = 0;
12679 const char *gps_methods_src = (const char *)
12680 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12681 memset(gps_methods, '\0', sizeof(gps_methods));
12682 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12683 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12684 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12685 if (GPS_PROCESSING_METHOD_SIZE != count) {
12686 rc = BAD_VALUE;
12687 }
12688 }
12689
12690 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12691 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12693 gps_timestamp)) {
12694 rc = BAD_VALUE;
12695 }
12696 }
12697
12698 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12699 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12700 cam_rotation_info_t rotation_info;
12701 if (orientation == 0) {
12702 rotation_info.rotation = ROTATE_0;
12703 } else if (orientation == 90) {
12704 rotation_info.rotation = ROTATE_90;
12705 } else if (orientation == 180) {
12706 rotation_info.rotation = ROTATE_180;
12707 } else if (orientation == 270) {
12708 rotation_info.rotation = ROTATE_270;
12709 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012710 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012711 rotation_info.streamId = snapshotStreamId;
12712 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12713 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12714 rc = BAD_VALUE;
12715 }
12716 }
12717
12718 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12719 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
12725 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12726 uint32_t thumb_quality = (uint32_t)
12727 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12729 thumb_quality)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
12733
12734 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12735 cam_dimension_t dim;
12736 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12737 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12738 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12739 rc = BAD_VALUE;
12740 }
12741 }
12742
12743 // Internal metadata
12744 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12745 size_t count = 0;
12746 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12747 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12748 privatedata.data.i32, privatedata.count, count);
12749 if (privatedata.count != count) {
12750 rc = BAD_VALUE;
12751 }
12752 }
12753
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012754 // ISO/Exposure Priority
12755 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12756 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12757 cam_priority_mode_t mode =
12758 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12759 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12760 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12761 use_iso_exp_pty.previewOnly = FALSE;
12762 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12763 use_iso_exp_pty.value = *ptr;
12764
12765 if(CAM_ISO_PRIORITY == mode) {
12766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12767 use_iso_exp_pty)) {
12768 rc = BAD_VALUE;
12769 }
12770 }
12771 else {
12772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12773 use_iso_exp_pty)) {
12774 rc = BAD_VALUE;
12775 }
12776 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012777
12778 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12779 rc = BAD_VALUE;
12780 }
12781 }
12782 } else {
12783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12784 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012785 }
12786 }
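    /* Note (sketch): ZSL is enabled (CAM_INTF_PARM_ZSL_MODE = 1) only when the
     * request selects ISO or exposure-time priority via the vendor tags above;
     * any other request resets it to 0. */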
12787
12788 // Saturation
12789 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12790 int32_t* use_saturation =
12791 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
12796
Thierry Strudel3d639192016-09-09 11:52:26 -070012797 // EV step
12798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12799 gCamCapability[mCameraId]->exp_compensation_step)) {
12800 rc = BAD_VALUE;
12801 }
12802
12803 // CDS info
12804 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12805 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12806 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12807
12808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12809 CAM_INTF_META_CDS_DATA, *cdsData)) {
12810 rc = BAD_VALUE;
12811 }
12812 }
12813
Shuzhen Wang19463d72016-03-08 11:09:52 -080012814 // Hybrid AE
12815 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12816 uint8_t *hybrid_ae = (uint8_t *)
12817 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12818
12819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12820 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824
Shuzhen Wang14415f52016-11-16 18:26:18 -080012825 // Histogram
12826 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12827 uint8_t histogramMode =
12828 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12830 histogramMode)) {
12831 rc = BAD_VALUE;
12832 }
12833 }
12834
12835 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12836 int32_t histogramBins =
12837 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12838 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12839 histogramBins)) {
12840 rc = BAD_VALUE;
12841 }
12842 }
12843
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012844 // Tracking AF
12845 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12846 uint8_t trackingAfTrigger =
12847 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12849 trackingAfTrigger)) {
12850 rc = BAD_VALUE;
12851 }
12852 }
12853
Thierry Strudel3d639192016-09-09 11:52:26 -070012854 return rc;
12855}
12856
12857/*===========================================================================
12858 * FUNCTION : captureResultCb
12859 *
12860 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12861 *
12862 * PARAMETERS :
12863 * @frame : frame information from mm-camera-interface
12864 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12865 * @userdata: userdata
12866 *
12867 * RETURN : NONE
12868 *==========================================================================*/
12869void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12870 camera3_stream_buffer_t *buffer,
12871 uint32_t frame_number, bool isInputBuffer, void *userdata)
12872{
12873 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12874 if (hw == NULL) {
12875 LOGE("Invalid hw %p", hw);
12876 return;
12877 }
12878
12879 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12880 return;
12881}
12882
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012883/*===========================================================================
12884 * FUNCTION : setBufferErrorStatus
12885 *
12886 * DESCRIPTION: Callback handler for channels to report any buffer errors
12887 *
12888 * PARAMETERS :
12889 * @ch : Channel on which buffer error is reported from
12890 * @frame_number : frame number on which buffer error is reported on
12891 * @buffer_status : buffer error status
12892 * @userdata: userdata
12893 *
12894 * RETURN : NONE
12895 *==========================================================================*/
12896void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12897 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12898{
12899 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12900 if (hw == NULL) {
12901 LOGE("Invalid hw %p", hw);
12902 return;
12903 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012904
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012905 hw->setBufferErrorStatus(ch, frame_number, err);
12906 return;
12907}
12908
12909void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12910 uint32_t frameNumber, camera3_buffer_status_t err)
12911{
12912 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12913 pthread_mutex_lock(&mMutex);
12914
12915 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12916 if (req.frame_number != frameNumber)
12917 continue;
12918 for (auto& k : req.mPendingBufferList) {
12919 if(k.stream->priv == ch) {
12920 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12921 }
12922 }
12923 }
12924
12925 pthread_mutex_unlock(&mMutex);
12926 return;
12927}
Thierry Strudel3d639192016-09-09 11:52:26 -070012928/*===========================================================================
12929 * FUNCTION : initialize
12930 *
12931 * DESCRIPTION: Pass framework callback pointers to HAL
12932 *
12933 * PARAMETERS :
12934 *
12935 *
12936 * RETURN : Success : 0
12937 * Failure: -ENODEV
12938 *==========================================================================*/
12939
12940int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12941 const camera3_callback_ops_t *callback_ops)
12942{
12943 LOGD("E");
12944 QCamera3HardwareInterface *hw =
12945 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12946 if (!hw) {
12947 LOGE("NULL camera device");
12948 return -ENODEV;
12949 }
12950
12951 int rc = hw->initialize(callback_ops);
12952 LOGD("X");
12953 return rc;
12954}
12955
12956/*===========================================================================
12957 * FUNCTION : configure_streams
12958 *
12959 * DESCRIPTION: Configure streams as requested by the framework
12960 *
12961 * PARAMETERS :
12962 *
12963 *
12964 * RETURN : Success: 0
12965 * Failure: -EINVAL (if stream configuration is invalid)
12966 * -ENODEV (fatal error)
12967 *==========================================================================*/
12968
12969int QCamera3HardwareInterface::configure_streams(
12970 const struct camera3_device *device,
12971 camera3_stream_configuration_t *stream_list)
12972{
12973 LOGD("E");
12974 QCamera3HardwareInterface *hw =
12975 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12976 if (!hw) {
12977 LOGE("NULL camera device");
12978 return -ENODEV;
12979 }
12980 int rc = hw->configureStreams(stream_list);
12981 LOGD("X");
12982 return rc;
12983}
12984
12985/*===========================================================================
12986 * FUNCTION : construct_default_request_settings
12987 *
12988 * DESCRIPTION: Configure a settings buffer to meet the required use case
12989 *
12990 * PARAMETERS :
12991 *
12992 *
12993 * RETURN : Success: Return valid metadata
12994 * Failure: Return NULL
12995 *==========================================================================*/
12996const camera_metadata_t* QCamera3HardwareInterface::
12997 construct_default_request_settings(const struct camera3_device *device,
12998 int type)
12999{
13000
13001 LOGD("E");
13002 camera_metadata_t* fwk_metadata = NULL;
13003 QCamera3HardwareInterface *hw =
13004 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13005 if (!hw) {
13006 LOGE("NULL camera device");
13007 return NULL;
13008 }
13009
13010 fwk_metadata = hw->translateCapabilityToMetadata(type);
13011
13012 LOGD("X");
13013 return fwk_metadata;
13014}
13015
13016/*===========================================================================
13017 * FUNCTION : process_capture_request
13018 *
13019 * DESCRIPTION: Process a capture request issued by the framework
13020 *
13021 * PARAMETERS :
13022 *
13023 *
13024 * RETURN :
13025 *==========================================================================*/
13026int QCamera3HardwareInterface::process_capture_request(
13027 const struct camera3_device *device,
13028 camera3_capture_request_t *request)
13029{
13030 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013031 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013032 QCamera3HardwareInterface *hw =
13033 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13034 if (!hw) {
13035 LOGE("NULL camera device");
13036 return -EINVAL;
13037 }
13038
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013039 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013040 LOGD("X");
13041 return rc;
13042}
13043
13044/*===========================================================================
13045 * FUNCTION : dump
13046 *
13047 * DESCRIPTION: Dump HAL debug state to the given file descriptor
13048 *
13049 * PARAMETERS :
13050 *
13051 *
13052 * RETURN :
13053 *==========================================================================*/
13054
13055void QCamera3HardwareInterface::dump(
13056 const struct camera3_device *device, int fd)
13057{
13058 /* Log level property is read when "adb shell dumpsys media.camera" is
13059 called so that the log level can be controlled without restarting
13060 the media server */
13061 getLogLevel();
13062
13063 LOGD("E");
13064 QCamera3HardwareInterface *hw =
13065 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13066 if (!hw) {
13067 LOGE("NULL camera device");
13068 return;
13069 }
13070
13071 hw->dump(fd);
13072 LOGD("X");
13073 return;
13074}
13075
13076/*===========================================================================
13077 * FUNCTION : flush
13078 *
13079 * DESCRIPTION: Flush in-flight requests and return buffers to the framework
13080 *
13081 * PARAMETERS :
13082 *
13083 *
13084 * RETURN :
13085 *==========================================================================*/
13086
13087int QCamera3HardwareInterface::flush(
13088 const struct camera3_device *device)
13089{
13090 int rc;
13091 LOGD("E");
13092 QCamera3HardwareInterface *hw =
13093 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13094 if (!hw) {
13095 LOGE("NULL camera device");
13096 return -EINVAL;
13097 }
13098
13099 pthread_mutex_lock(&hw->mMutex);
13100 // Validate current state
13101 switch (hw->mState) {
13102 case STARTED:
13103 /* valid state */
13104 break;
13105
13106 case ERROR:
13107 pthread_mutex_unlock(&hw->mMutex);
13108 hw->handleCameraDeviceError();
13109 return -ENODEV;
13110
13111 default:
13112 LOGI("Flush returned during state %d", hw->mState);
13113 pthread_mutex_unlock(&hw->mMutex);
13114 return 0;
13115 }
13116 pthread_mutex_unlock(&hw->mMutex);
13117
13118 rc = hw->flush(true /* restart channels */ );
13119 LOGD("X");
13120 return rc;
13121}
13122
13123/*===========================================================================
13124 * FUNCTION : close_camera_device
13125 *
13126 * DESCRIPTION: hw_device_t close entry point; deletes the HAL instance for
13127 *              the camera being closed
13128 * PARAMETERS :
13129 *   @device  : hw_device_t handle of the camera device to close
13130 *
13131 * RETURN     : NO_ERROR on success, BAD_VALUE if the device handle is NULL
13132 *==========================================================================*/
13133int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13134{
13135 int ret = NO_ERROR;
13136 QCamera3HardwareInterface *hw =
13137 reinterpret_cast<QCamera3HardwareInterface *>(
13138 reinterpret_cast<camera3_device_t *>(device)->priv);
13139 if (!hw) {
13140 LOGE("NULL camera device");
13141 return BAD_VALUE;
13142 }
13143
13144 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13145 delete hw;
13146 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013147 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013148 return ret;
13149}
13150
13151/*===========================================================================
13152 * FUNCTION : getWaveletDenoiseProcessPlate
13153 *
13154 * DESCRIPTION: query wavelet denoise process plate
13155 *
13156 * PARAMETERS : None
13157 *
13158 * RETURN     : WNR process plate value
13159 *==========================================================================*/
13160cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13161{
13162 char prop[PROPERTY_VALUE_MAX];
13163 memset(prop, 0, sizeof(prop));
13164 property_get("persist.denoise.process.plates", prop, "0");
13165 int processPlate = atoi(prop);
13166 switch(processPlate) {
13167 case 0:
13168 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13169 case 1:
13170 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13171 case 2:
13172 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13173 case 3:
13174 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13175 default:
13176 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13177 }
13178}
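/* Illustrative sketch (for documentation only): the plate selection is driven
 * purely by the persist.denoise.process.plates property read above.
 *
 *     // e.g. with "adb shell setprop persist.denoise.process.plates 1" set:
 *     cam_denoise_process_type_t plate = getWaveletDenoiseProcessPlate();
 *     // plate == CAM_WAVELET_DENOISE_CBCR_ONLY; unknown values fall back to
 *     // CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 */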
13179
13180
13181/*===========================================================================
13182 * FUNCTION : getTemporalDenoiseProcessPlate
13183 *
13184 * DESCRIPTION: query temporal denoise process plate
13185 *
13186 * PARAMETERS : None
13187 *
13188 * RETURN     : TNR process plate value
13189 *==========================================================================*/
13190cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13191{
13192 char prop[PROPERTY_VALUE_MAX];
13193 memset(prop, 0, sizeof(prop));
13194 property_get("persist.tnr.process.plates", prop, "0");
13195 int processPlate = atoi(prop);
13196 switch(processPlate) {
13197 case 0:
13198 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13199 case 1:
13200 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13201 case 2:
13202 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13203 case 3:
13204 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13205 default:
13206 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13207 }
13208}
13209
13210
13211/*===========================================================================
13212 * FUNCTION : extractSceneMode
13213 *
13214 * DESCRIPTION: Extract scene mode from framework-set metadata
13215 *
13216 * PARAMETERS :
13217 *   @frame_settings: CameraMetadata reference
13218 *   @metaMode: ANDROID_CONTROL_MODE value set by the framework
13219 *   @hal_metadata: hal metadata structure
13220 *
13221 * RETURN     : int32_t type of status (NO_ERROR on success)
13222 *==========================================================================*/
13223int32_t QCamera3HardwareInterface::extractSceneMode(
13224 const CameraMetadata &frame_settings, uint8_t metaMode,
13225 metadata_buffer_t *hal_metadata)
13226{
13227 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013228 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13229
13230 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13231 LOGD("Ignoring control mode OFF_KEEP_STATE");
13232 return NO_ERROR;
13233 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013234
13235 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13236 camera_metadata_ro_entry entry =
13237 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13238 if (0 == entry.count)
13239 return rc;
13240
13241 uint8_t fwk_sceneMode = entry.data.u8[0];
13242
13243 int val = lookupHalName(SCENE_MODES_MAP,
13244 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13245 fwk_sceneMode);
13246 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013247 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013248 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013249 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013250 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013251
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013252 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13253 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13254 }
13255
13256 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13257 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013258 cam_hdr_param_t hdr_params;
13259 hdr_params.hdr_enable = 1;
13260 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13261 hdr_params.hdr_need_1x = false;
13262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13263 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13264 rc = BAD_VALUE;
13265 }
13266 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013267
Thierry Strudel3d639192016-09-09 11:52:26 -070013268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13269 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13270 rc = BAD_VALUE;
13271 }
13272 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013273
13274 if (mForceHdrSnapshot) {
13275 cam_hdr_param_t hdr_params;
13276 hdr_params.hdr_enable = 1;
13277 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13278 hdr_params.hdr_need_1x = false;
13279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13280 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13281 rc = BAD_VALUE;
13282 }
13283 }
13284
Thierry Strudel3d639192016-09-09 11:52:26 -070013285 return rc;
13286}
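/* Illustrative sketch (hypothetical framework-side settings, for documentation
 * only): the metadata combination that routes a request through the scene-mode
 * handling above, here the HDR scene mode.
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *     // extractSceneMode(settings, mode, halMetadata) then maps the scene
 *     // mode to its HAL value and programs bestshot / HDR bracketing
 */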
13287
13288/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013289 * FUNCTION : setVideoHdrMode
13290 *
13291 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13292 *
13293 * PARAMETERS :
13294 *   @hal_metadata: hal metadata structure
13295 *   @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE value)
13296 *
13297 * RETURN     : int32_t type of status (NO_ERROR on success)
13298 *==========================================================================*/
13299int32_t QCamera3HardwareInterface::setVideoHdrMode(
13300 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13301{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013302 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13303 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13304 }
13305
13306 LOGE("Invalid Video HDR mode %d!", vhdr);
13307 return BAD_VALUE;
13308}
13309
13310/*===========================================================================
13311 * FUNCTION : setSensorHDR
13312 *
13313 * DESCRIPTION: Enable/disable sensor HDR.
13314 *
13315 * PARAMETERS :
13316 * @hal_metadata: hal metadata structure
13317 *   @enable: whether to enable or disable sensor HDR
13318 *   @isVideoHdrEnable: true when invoked from setVideoHdrMode()
13319 * RETURN     : int32_t type of status (NO_ERROR on success)
13320 *==========================================================================*/
13321int32_t QCamera3HardwareInterface::setSensorHDR(
13322 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13323{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013324 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013325 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13326
13327 if (enable) {
13328 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13329 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13330 #ifdef _LE_CAMERA_
13331 //Default to staggered HDR for IOT
13332 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13333 #else
13334 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13335 #endif
13336 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13337 }
13338
13339 bool isSupported = false;
13340 switch (sensor_hdr) {
13341 case CAM_SENSOR_HDR_IN_SENSOR:
13342 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13343 CAM_QCOM_FEATURE_SENSOR_HDR) {
13344 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013345 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013346 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013347 break;
13348 case CAM_SENSOR_HDR_ZIGZAG:
13349 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13350 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13351 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013352 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013353 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013354 break;
13355 case CAM_SENSOR_HDR_STAGGERED:
13356 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13357 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13358 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013359 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013360 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013361 break;
13362 case CAM_SENSOR_HDR_OFF:
13363 isSupported = true;
13364 LOGD("Turning off sensor HDR");
13365 break;
13366 default:
13367 LOGE("HDR mode %d not supported", sensor_hdr);
13368 rc = BAD_VALUE;
13369 break;
13370 }
13371
13372 if(isSupported) {
13373 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13374 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13375 rc = BAD_VALUE;
13376 } else {
13377 if(!isVideoHdrEnable)
13378 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013379 }
13380 }
13381 return rc;
13382}
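/* Illustrative sketch (for documentation only): the sensor HDR flavor is picked
 * by the persist.camera.sensor.hdr property when HDR is requested; the numeric
 * values mirror cam_sensor_hdr_type_t as handled in the switch above.
 *
 *     // e.g. persist.camera.sensor.hdr = 3 requests staggered HDR,
 *     //      persist.camera.sensor.hdr = 0 turns sensor HDR off
 *     int32_t rc = setSensorHDR(mParameters, true /*enable*/, false /*video*/);
 *     if (rc != NO_ERROR) {
 *         // requested mode is not in the sensor capability mask
 *     }
 */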
13383
13384/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013385 * FUNCTION : needRotationReprocess
13386 *
13387 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13388 *
13389 * PARAMETERS : none
13390 *
13391 * RETURN : true: needed
13392 * false: no need
13393 *==========================================================================*/
13394bool QCamera3HardwareInterface::needRotationReprocess()
13395{
13396 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13397        // pp has the capability to process rotation; do rotation via reprocess
13398 LOGH("need do reprocess for rotation");
13399 return true;
13400 }
13401
13402 return false;
13403}
13404
13405/*===========================================================================
13406 * FUNCTION : needReprocess
13407 *
13408 * DESCRIPTION: if reprocess is needed
13409 *
13410 * PARAMETERS : none
13411 *
13412 * RETURN : true: needed
13413 * false: no need
13414 *==========================================================================*/
13415bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13416{
13417 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13418 // TODO: add for ZSL HDR later
13419 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13420 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13421 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13422 return true;
13423 } else {
13424 LOGH("already post processed frame");
13425 return false;
13426 }
13427 }
13428 return needRotationReprocess();
13429}
13430
13431/*===========================================================================
13432 * FUNCTION : needJpegExifRotation
13433 *
13434 * DESCRIPTION: if JPEG EXIF rotation is needed
13435 *
13436 * PARAMETERS : none
13437 *
13438 * RETURN : true: needed
13439 * false: no need
13440 *==========================================================================*/
13441bool QCamera3HardwareInterface::needJpegExifRotation()
13442{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013443 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013444 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13445 LOGD("Need use Jpeg EXIF Rotation");
13446 return true;
13447 }
13448 return false;
13449}
13450
13451/*===========================================================================
13452 * FUNCTION : addOfflineReprocChannel
13453 *
13454 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13455 * coming from input channel
13456 *
13457 * PARAMETERS :
13458 * @config : reprocess configuration
13459 * @inputChHandle : pointer to the input (source) channel
13460 *
13461 *
13462 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13463 *==========================================================================*/
13464QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13465 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13466{
13467 int32_t rc = NO_ERROR;
13468 QCamera3ReprocessChannel *pChannel = NULL;
13469
13470 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013471 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13472 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013473 if (NULL == pChannel) {
13474 LOGE("no mem for reprocess channel");
13475 return NULL;
13476 }
13477
13478 rc = pChannel->initialize(IS_TYPE_NONE);
13479 if (rc != NO_ERROR) {
13480 LOGE("init reprocess channel failed, ret = %d", rc);
13481 delete pChannel;
13482 return NULL;
13483 }
13484
13485 // pp feature config
13486 cam_pp_feature_config_t pp_config;
13487 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13488
13489 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13490 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13491 & CAM_QCOM_FEATURE_DSDN) {
13492        //Use CPP CDS in case h/w supports it.
13493 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13494 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13495 }
13496 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13497 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13498 }
13499
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013500 if (config.hdr_param.hdr_enable) {
13501 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13502 pp_config.hdr_param = config.hdr_param;
13503 }
13504
13505 if (mForceHdrSnapshot) {
13506 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13507 pp_config.hdr_param.hdr_enable = 1;
13508 pp_config.hdr_param.hdr_need_1x = 0;
13509 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13510 }
13511
Thierry Strudel3d639192016-09-09 11:52:26 -070013512 rc = pChannel->addReprocStreamsFromSource(pp_config,
13513 config,
13514 IS_TYPE_NONE,
13515 mMetadataChannel);
13516
13517 if (rc != NO_ERROR) {
13518 delete pChannel;
13519 return NULL;
13520 }
13521 return pChannel;
13522}
13523
13524/*===========================================================================
13525 * FUNCTION : getMobicatMask
13526 *
13527 * DESCRIPTION: returns mobicat mask
13528 *
13529 * PARAMETERS : none
13530 *
13531 * RETURN : mobicat mask
13532 *
13533 *==========================================================================*/
13534uint8_t QCamera3HardwareInterface::getMobicatMask()
13535{
13536 return m_MobicatMask;
13537}
13538
13539/*===========================================================================
13540 * FUNCTION : setMobicat
13541 *
13542 * DESCRIPTION: set Mobicat on/off.
13543 *
13544 * PARAMETERS :
13545 * @params : none
13546 *
13547 * RETURN : int32_t type of status
13548 * NO_ERROR -- success
13549 *              non-zero failure code
13550 *==========================================================================*/
13551int32_t QCamera3HardwareInterface::setMobicat()
13552{
13553 char value [PROPERTY_VALUE_MAX];
13554 property_get("persist.camera.mobicat", value, "0");
13555 int32_t ret = NO_ERROR;
13556 uint8_t enableMobi = (uint8_t)atoi(value);
13557
13558 if (enableMobi) {
13559 tune_cmd_t tune_cmd;
13560 tune_cmd.type = SET_RELOAD_CHROMATIX;
13561 tune_cmd.module = MODULE_ALL;
13562 tune_cmd.value = TRUE;
13563 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13564 CAM_INTF_PARM_SET_VFE_COMMAND,
13565 tune_cmd);
13566
13567 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13568 CAM_INTF_PARM_SET_PP_COMMAND,
13569 tune_cmd);
13570 }
13571 m_MobicatMask = enableMobi;
13572
13573 return ret;
13574}
13575
13576/*===========================================================================
13577* FUNCTION : getLogLevel
13578*
13579* DESCRIPTION: Reads the log level property into a variable
13580*
13581* PARAMETERS :
13582* None
13583*
13584* RETURN :
13585* None
13586*==========================================================================*/
13587void QCamera3HardwareInterface::getLogLevel()
13588{
13589 char prop[PROPERTY_VALUE_MAX];
13590 uint32_t globalLogLevel = 0;
13591
13592 property_get("persist.camera.hal.debug", prop, "0");
13593 int val = atoi(prop);
13594 if (0 <= val) {
13595 gCamHal3LogLevel = (uint32_t)val;
13596 }
13597
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013598 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013599 gKpiDebugLevel = atoi(prop);
13600
13601 property_get("persist.camera.global.debug", prop, "0");
13602 val = atoi(prop);
13603 if (0 <= val) {
13604 globalLogLevel = (uint32_t)val;
13605 }
13606
13607 /* Highest log level among hal.logs and global.logs is selected */
13608 if (gCamHal3LogLevel < globalLogLevel)
13609 gCamHal3LogLevel = globalLogLevel;
13610
13611 return;
13612}
13613
13614/*===========================================================================
13615 * FUNCTION : validateStreamRotations
13616 *
13617 * DESCRIPTION: Check if the rotations requested are supported
13618 *
13619 * PARAMETERS :
13620 * @stream_list : streams to be configured
13621 *
13622 * RETURN : NO_ERROR on success
13623 * -EINVAL on failure
13624 *
13625 *==========================================================================*/
13626int QCamera3HardwareInterface::validateStreamRotations(
13627 camera3_stream_configuration_t *streamList)
13628{
13629 int rc = NO_ERROR;
13630
13631 /*
13632 * Loop through all streams requested in configuration
13633 * Check if unsupported rotations have been requested on any of them
13634 */
13635 for (size_t j = 0; j < streamList->num_streams; j++){
13636 camera3_stream_t *newStream = streamList->streams[j];
13637
13638 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13639 bool isImplDef = (newStream->format ==
13640 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13641 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13642 isImplDef);
13643
13644 if (isRotated && (!isImplDef || isZsl)) {
13645 LOGE("Error: Unsupported rotation of %d requested for stream"
13646                     " type:%d and stream format:%d",
13647 newStream->rotation, newStream->stream_type,
13648 newStream->format);
13649 rc = -EINVAL;
13650 break;
13651 }
13652 }
13653
13654 return rc;
13655}
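/* Illustrative sketch (hypothetical stream set, for documentation only): a
 * configuration this check rejects - rotation requested on a stream that is
 * not implementation-defined (here a BLOB/JPEG stream).
 *
 *     camera3_stream_t jpegStream = {};
 *     jpegStream.stream_type = CAMERA3_STREAM_OUTPUT;
 *     jpegStream.format      = HAL_PIXEL_FORMAT_BLOB;
 *     jpegStream.rotation    = CAMERA3_STREAM_ROTATION_90;
 *     camera3_stream_t *streams[] = { &jpegStream };
 *     camera3_stream_configuration_t config = {};
 *     config.num_streams = 1;
 *     config.streams     = streams;
 *     // validateStreamRotations(&config) returns -EINVAL for this setup
 */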
13656
13657/*===========================================================================
13658* FUNCTION : getFlashInfo
13659*
13660* DESCRIPTION: Retrieve information about whether the device has a flash.
13661*
13662* PARAMETERS :
13663* @cameraId : Camera id to query
13664* @hasFlash : Boolean indicating whether there is a flash device
13665* associated with given camera
13666* @flashNode : If a flash device exists, this will be its device node.
13667*
13668* RETURN :
13669* None
13670*==========================================================================*/
13671void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13672 bool& hasFlash,
13673 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13674{
13675 cam_capability_t* camCapability = gCamCapability[cameraId];
13676 if (NULL == camCapability) {
13677 hasFlash = false;
13678 flashNode[0] = '\0';
13679 } else {
13680 hasFlash = camCapability->flash_available;
13681 strlcpy(flashNode,
13682 (char*)camCapability->flash_dev_name,
13683 QCAMERA_MAX_FILEPATH_LENGTH);
13684 }
13685}
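/* Illustrative sketch (hypothetical caller, for documentation only), e.g. a
 * torch/flash module querying camera 0 before opening the device:
 *
 *     bool hasFlash = false;
 *     char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
 *     QCamera3HardwareInterface::getFlashInfo(0, hasFlash, flashNode);
 *     if (hasFlash) {
 *         // flashNode holds the flash device node to control the torch
 *     }
 */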
13686
13687/*===========================================================================
13688* FUNCTION : getEepromVersionInfo
13689*
13690* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13691*
13692* PARAMETERS : None
13693*
13694* RETURN : string describing EEPROM version
13695* "\0" if no such info available
13696*==========================================================================*/
13697const char *QCamera3HardwareInterface::getEepromVersionInfo()
13698{
13699 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13700}
13701
13702/*===========================================================================
13703* FUNCTION : getLdafCalib
13704*
13705* DESCRIPTION: Retrieve Laser AF calibration data
13706*
13707* PARAMETERS : None
13708*
13709* RETURN : Two uint32_t describing laser AF calibration data
13710* NULL if none is available.
13711*==========================================================================*/
13712const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13713{
13714 if (mLdafCalibExist) {
13715 return &mLdafCalib[0];
13716 } else {
13717 return NULL;
13718 }
13719}
13720
13721/*===========================================================================
13722 * FUNCTION : dynamicUpdateMetaStreamInfo
13723 *
13724 * DESCRIPTION: This function:
13725 * (1) stops all the channels
13726 * (2) returns error on pending requests and buffers
13727 * (3) sends metastream_info in setparams
13728 * (4) starts all channels
13729 * This is useful when sensor has to be restarted to apply any
13730 * settings such as frame rate from a different sensor mode
13731 *
13732 * PARAMETERS : None
13733 *
13734 * RETURN : NO_ERROR on success
13735 * Error codes on failure
13736 *
13737 *==========================================================================*/
13738int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13739{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013740 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013741 int rc = NO_ERROR;
13742
13743 LOGD("E");
13744
13745 rc = stopAllChannels();
13746 if (rc < 0) {
13747 LOGE("stopAllChannels failed");
13748 return rc;
13749 }
13750
13751 rc = notifyErrorForPendingRequests();
13752 if (rc < 0) {
13753 LOGE("notifyErrorForPendingRequests failed");
13754 return rc;
13755 }
13756
13757 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13758 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13759                 ", Format:%d",
13760 mStreamConfigInfo.type[i],
13761 mStreamConfigInfo.stream_sizes[i].width,
13762 mStreamConfigInfo.stream_sizes[i].height,
13763 mStreamConfigInfo.postprocess_mask[i],
13764 mStreamConfigInfo.format[i]);
13765 }
13766
13767 /* Send meta stream info once again so that ISP can start */
13768 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13769 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13770 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13771 mParameters);
13772 if (rc < 0) {
13773 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13774 }
13775
13776 rc = startAllChannels();
13777 if (rc < 0) {
13778 LOGE("startAllChannels failed");
13779 return rc;
13780 }
13781
13782 LOGD("X");
13783 return rc;
13784}
13785
13786/*===========================================================================
13787 * FUNCTION : stopAllChannels
13788 *
13789 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13790 *
13791 * PARAMETERS : None
13792 *
13793 * RETURN : NO_ERROR on success
13794 * Error codes on failure
13795 *
13796 *==========================================================================*/
13797int32_t QCamera3HardwareInterface::stopAllChannels()
13798{
13799 int32_t rc = NO_ERROR;
13800
13801 LOGD("Stopping all channels");
13802 // Stop the Streams/Channels
13803 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13804 it != mStreamInfo.end(); it++) {
13805 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13806 if (channel) {
13807 channel->stop();
13808 }
13809 (*it)->status = INVALID;
13810 }
13811
13812 if (mSupportChannel) {
13813 mSupportChannel->stop();
13814 }
13815 if (mAnalysisChannel) {
13816 mAnalysisChannel->stop();
13817 }
13818 if (mRawDumpChannel) {
13819 mRawDumpChannel->stop();
13820 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013821 if (mHdrPlusRawSrcChannel) {
13822 mHdrPlusRawSrcChannel->stop();
13823 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013824 if (mMetadataChannel) {
13825 /* If content of mStreamInfo is not 0, there is metadata stream */
13826 mMetadataChannel->stop();
13827 }
13828
13829 LOGD("All channels stopped");
13830 return rc;
13831}
13832
13833/*===========================================================================
13834 * FUNCTION : startAllChannels
13835 *
13836 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13837 *
13838 * PARAMETERS : None
13839 *
13840 * RETURN : NO_ERROR on success
13841 * Error codes on failure
13842 *
13843 *==========================================================================*/
13844int32_t QCamera3HardwareInterface::startAllChannels()
13845{
13846 int32_t rc = NO_ERROR;
13847
13848 LOGD("Start all channels ");
13849 // Start the Streams/Channels
13850 if (mMetadataChannel) {
13851 /* If content of mStreamInfo is not 0, there is metadata stream */
13852 rc = mMetadataChannel->start();
13853 if (rc < 0) {
13854 LOGE("META channel start failed");
13855 return rc;
13856 }
13857 }
13858 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13859 it != mStreamInfo.end(); it++) {
13860 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13861 if (channel) {
13862 rc = channel->start();
13863 if (rc < 0) {
13864 LOGE("channel start failed");
13865 return rc;
13866 }
13867 }
13868 }
13869 if (mAnalysisChannel) {
13870 mAnalysisChannel->start();
13871 }
13872 if (mSupportChannel) {
13873 rc = mSupportChannel->start();
13874 if (rc < 0) {
13875 LOGE("Support channel start failed");
13876 return rc;
13877 }
13878 }
13879 if (mRawDumpChannel) {
13880 rc = mRawDumpChannel->start();
13881 if (rc < 0) {
13882 LOGE("RAW dump channel start failed");
13883 return rc;
13884 }
13885 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013886 if (mHdrPlusRawSrcChannel) {
13887 rc = mHdrPlusRawSrcChannel->start();
13888 if (rc < 0) {
13889 LOGE("HDR+ RAW channel start failed");
13890 return rc;
13891 }
13892 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013893
13894 LOGD("All channels started");
13895 return rc;
13896}
13897
13898/*===========================================================================
13899 * FUNCTION : notifyErrorForPendingRequests
13900 *
13901 * DESCRIPTION: This function sends error for all the pending requests/buffers
13902 *
13903 * PARAMETERS : None
13904 *
13905 * RETURN : Error codes
13906 * NO_ERROR on success
13907 *
13908 *==========================================================================*/
13909int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13910{
13911 int32_t rc = NO_ERROR;
13912 unsigned int frameNum = 0;
13913 camera3_capture_result_t result;
13914 camera3_stream_buffer_t *pStream_Buf = NULL;
13915
13916 memset(&result, 0, sizeof(camera3_capture_result_t));
13917
13918 if (mPendingRequestsList.size() > 0) {
13919 pendingRequestIterator i = mPendingRequestsList.begin();
13920 frameNum = i->frame_number;
13921 } else {
13922 /* There might still be pending buffers even though there are
13923 no pending requests. Setting the frameNum to MAX so that
13924 all the buffers with smaller frame numbers are returned */
13925 frameNum = UINT_MAX;
13926 }
13927
13928 LOGH("Oldest frame num on mPendingRequestsList = %u",
13929 frameNum);
13930
Emilian Peev7650c122017-01-19 08:24:33 -080013931 notifyErrorFoPendingDepthData(mDepthChannel);
13932
Thierry Strudel3d639192016-09-09 11:52:26 -070013933 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13934 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13935
13936 if (req->frame_number < frameNum) {
13937 // Send Error notify to frameworks for each buffer for which
13938 // metadata buffer is already sent
13939 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13940 req->frame_number, req->mPendingBufferList.size());
13941
13942 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13943 if (NULL == pStream_Buf) {
13944 LOGE("No memory for pending buffers array");
13945 return NO_MEMORY;
13946 }
13947 memset(pStream_Buf, 0,
13948 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13949 result.result = NULL;
13950 result.frame_number = req->frame_number;
13951 result.num_output_buffers = req->mPendingBufferList.size();
13952 result.output_buffers = pStream_Buf;
13953
13954 size_t index = 0;
13955 for (auto info = req->mPendingBufferList.begin();
13956 info != req->mPendingBufferList.end(); ) {
13957
13958 camera3_notify_msg_t notify_msg;
13959 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13960 notify_msg.type = CAMERA3_MSG_ERROR;
13961 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13962 notify_msg.message.error.error_stream = info->stream;
13963 notify_msg.message.error.frame_number = req->frame_number;
13964 pStream_Buf[index].acquire_fence = -1;
13965 pStream_Buf[index].release_fence = -1;
13966 pStream_Buf[index].buffer = info->buffer;
13967 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13968 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013969 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013970 index++;
13971 // Remove buffer from list
13972 info = req->mPendingBufferList.erase(info);
13973 }
13974
13975 // Remove this request from Map
13976 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13977 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13978 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13979
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013980 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013981
13982 delete [] pStream_Buf;
13983 } else {
13984
13985 // Go through the pending requests info and send error request to framework
13986 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13987
13988 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13989
13990 // Send error notify to frameworks
13991 camera3_notify_msg_t notify_msg;
13992 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13993 notify_msg.type = CAMERA3_MSG_ERROR;
13994 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13995 notify_msg.message.error.error_stream = NULL;
13996 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013997 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013998
13999 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
14000 if (NULL == pStream_Buf) {
14001 LOGE("No memory for pending buffers array");
14002 return NO_MEMORY;
14003 }
14004 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14005
14006 result.result = NULL;
14007 result.frame_number = req->frame_number;
14008 result.input_buffer = i->input_buffer;
14009 result.num_output_buffers = req->mPendingBufferList.size();
14010 result.output_buffers = pStream_Buf;
14011
14012 size_t index = 0;
14013 for (auto info = req->mPendingBufferList.begin();
14014 info != req->mPendingBufferList.end(); ) {
14015 pStream_Buf[index].acquire_fence = -1;
14016 pStream_Buf[index].release_fence = -1;
14017 pStream_Buf[index].buffer = info->buffer;
14018 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14019 pStream_Buf[index].stream = info->stream;
14020 index++;
14021 // Remove buffer from list
14022 info = req->mPendingBufferList.erase(info);
14023 }
14024
14025 // Remove this request from Map
14026 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14027 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14028 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14029
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014030 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014031 delete [] pStream_Buf;
14032 i = erasePendingRequest(i);
14033 }
14034 }
14035
14036 /* Reset pending frame Drop list and requests list */
14037 mPendingFrameDropList.clear();
14038
14039 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14040 req.mPendingBufferList.clear();
14041 }
14042 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014043 LOGH("Cleared all the pending buffers ");
14044
14045 return rc;
14046}
14047
14048bool QCamera3HardwareInterface::isOnEncoder(
14049 const cam_dimension_t max_viewfinder_size,
14050 uint32_t width, uint32_t height)
14051{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014052 return ((width > (uint32_t)max_viewfinder_size.width) ||
14053 (height > (uint32_t)max_viewfinder_size.height) ||
14054 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14055 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014056}
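/* Worked examples for the predicate above (sketch, assuming a 1080p maximum
 * viewfinder size):
 *
 *     cam_dimension_t maxVf = {1920, 1080};
 *     isOnEncoder(maxVf, 1280,  720);   // false: fits the viewfinder path
 *     isOnEncoder(maxVf, 3840, 2160);   // true: exceeds the viewfinder size
 *     isOnEncoder(maxVf, 4096, 2160);   // true: also exceeds the 4K bound
 */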
14057
14058/*===========================================================================
14059 * FUNCTION : setBundleInfo
14060 *
14061 * DESCRIPTION: Set bundle info for all streams that are bundle.
14062 *
14063 * PARAMETERS : None
14064 *
14065 * RETURN : NO_ERROR on success
14066 * Error codes on failure
14067 *==========================================================================*/
14068int32_t QCamera3HardwareInterface::setBundleInfo()
14069{
14070 int32_t rc = NO_ERROR;
14071
14072 if (mChannelHandle) {
14073 cam_bundle_config_t bundleInfo;
14074 memset(&bundleInfo, 0, sizeof(bundleInfo));
14075 rc = mCameraHandle->ops->get_bundle_info(
14076 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14077 if (rc != NO_ERROR) {
14078 LOGE("get_bundle_info failed");
14079 return rc;
14080 }
14081 if (mAnalysisChannel) {
14082 mAnalysisChannel->setBundleInfo(bundleInfo);
14083 }
14084 if (mSupportChannel) {
14085 mSupportChannel->setBundleInfo(bundleInfo);
14086 }
14087 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14088 it != mStreamInfo.end(); it++) {
14089 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14090 channel->setBundleInfo(bundleInfo);
14091 }
14092 if (mRawDumpChannel) {
14093 mRawDumpChannel->setBundleInfo(bundleInfo);
14094 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014095 if (mHdrPlusRawSrcChannel) {
14096 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14097 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014098 }
14099
14100 return rc;
14101}
14102
14103/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014104 * FUNCTION : setInstantAEC
14105 *
14106 * DESCRIPTION: Set Instant AEC related params.
14107 *
14108 * PARAMETERS :
14109 * @meta: CameraMetadata reference
14110 *
14111 * RETURN : NO_ERROR on success
14112 * Error codes on failure
14113 *==========================================================================*/
14114int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14115{
14116 int32_t rc = NO_ERROR;
14117 uint8_t val = 0;
14118 char prop[PROPERTY_VALUE_MAX];
14119
14120 // First try to configure instant AEC from framework metadata
14121 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14122 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14123 }
14124
14125 // If framework did not set this value, try to read from set prop.
14126 if (val == 0) {
14127 memset(prop, 0, sizeof(prop));
14128 property_get("persist.camera.instant.aec", prop, "0");
14129 val = (uint8_t)atoi(prop);
14130 }
14131
14132 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14133 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14134 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14135 mInstantAEC = val;
14136 mInstantAECSettledFrameNumber = 0;
14137 mInstantAecFrameIdxCount = 0;
14138 LOGH("instantAEC value set %d",val);
14139 if (mInstantAEC) {
14140 memset(prop, 0, sizeof(prop));
14141 property_get("persist.camera.ae.instant.bound", prop, "10");
14142 int32_t aec_frame_skip_cnt = atoi(prop);
14143 if (aec_frame_skip_cnt >= 0) {
14144 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14145 } else {
14146 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14147 rc = BAD_VALUE;
14148 }
14149 }
14150 } else {
14151 LOGE("Bad instant aec value set %d", val);
14152 rc = BAD_VALUE;
14153 }
14154 return rc;
14155}
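/* Illustrative sketch (hypothetical values, for documentation only): instant
 * AEC can come either from the vendor tag in the session settings or from the
 * persist.camera.instant.aec property.
 *
 *     CameraMetadata meta;
 *     int32_t aecMode = 1;   // any non-zero value below CAM_AEC_CONVERGENCE_MAX
 *     meta.update(QCAMERA3_INSTANT_AEC_MODE, &aecMode, 1);
 *     setInstantAEC(meta);
 *     // equivalently: adb shell setprop persist.camera.instant.aec 1
 */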
14156
14157/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014158 * FUNCTION : get_num_overall_buffers
14159 *
14160 * DESCRIPTION: Estimate number of pending buffers across all requests.
14161 *
14162 * PARAMETERS : None
14163 *
14164 * RETURN : Number of overall pending buffers
14165 *
14166 *==========================================================================*/
14167uint32_t PendingBuffersMap::get_num_overall_buffers()
14168{
14169 uint32_t sum_buffers = 0;
14170 for (auto &req : mPendingBuffersInRequest) {
14171 sum_buffers += req.mPendingBufferList.size();
14172 }
14173 return sum_buffers;
14174}
14175
14176/*===========================================================================
14177 * FUNCTION : removeBuf
14178 *
14179 * DESCRIPTION: Remove a matching buffer from tracker.
14180 *
14181 * PARAMETERS : @buffer: image buffer for the callback
14182 *
14183 * RETURN : None
14184 *
14185 *==========================================================================*/
14186void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14187{
14188 bool buffer_found = false;
14189 for (auto req = mPendingBuffersInRequest.begin();
14190 req != mPendingBuffersInRequest.end(); req++) {
14191 for (auto k = req->mPendingBufferList.begin();
14192 k != req->mPendingBufferList.end(); k++ ) {
14193 if (k->buffer == buffer) {
14194 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14195 req->frame_number, buffer);
14196 k = req->mPendingBufferList.erase(k);
14197 if (req->mPendingBufferList.empty()) {
14198 // Remove this request from Map
14199 req = mPendingBuffersInRequest.erase(req);
14200 }
14201 buffer_found = true;
14202 break;
14203 }
14204 }
14205 if (buffer_found) {
14206 break;
14207 }
14208 }
14209 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14210 get_num_overall_buffers());
14211}
14212
14213/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014214 * FUNCTION : getBufErrStatus
14215 *
14216 * DESCRIPTION: get buffer error status
14217 *
14218 * PARAMETERS : @buffer: buffer handle
14219 *
14220 * RETURN : Error status
14221 *
14222 *==========================================================================*/
14223int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14224{
14225 for (auto& req : mPendingBuffersInRequest) {
14226 for (auto& k : req.mPendingBufferList) {
14227 if (k.buffer == buffer)
14228 return k.bufStatus;
14229 }
14230 }
14231 return CAMERA3_BUFFER_STATUS_OK;
14232}
14233
14234/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014235 * FUNCTION : setPAAFSupport
14236 *
14237 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14238 * feature mask according to stream type and filter
14239 * arrangement
14240 *
14241 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14242 * @stream_type: stream type
14243 * @filter_arrangement: filter arrangement
14244 *
14245 * RETURN : None
14246 *==========================================================================*/
14247void QCamera3HardwareInterface::setPAAFSupport(
14248 cam_feature_mask_t& feature_mask,
14249 cam_stream_type_t stream_type,
14250 cam_color_filter_arrangement_t filter_arrangement)
14251{
Thierry Strudel3d639192016-09-09 11:52:26 -070014252 switch (filter_arrangement) {
14253 case CAM_FILTER_ARRANGEMENT_RGGB:
14254 case CAM_FILTER_ARRANGEMENT_GRBG:
14255 case CAM_FILTER_ARRANGEMENT_GBRG:
14256 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014257 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14258 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014259 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014260 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14261 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014262 }
14263 break;
14264 case CAM_FILTER_ARRANGEMENT_Y:
14265 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14266 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14267 }
14268 break;
14269 default:
14270 break;
14271 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014272 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14273 feature_mask, stream_type, filter_arrangement);
14274
14275
Thierry Strudel3d639192016-09-09 11:52:26 -070014276}
14277
14278/*===========================================================================
14279* FUNCTION : getSensorMountAngle
14280*
14281* DESCRIPTION: Retrieve sensor mount angle
14282*
14283* PARAMETERS : None
14284*
14285* RETURN : sensor mount angle in uint32_t
14286*==========================================================================*/
14287uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14288{
14289 return gCamCapability[mCameraId]->sensor_mount_angle;
14290}
14291
14292/*===========================================================================
14293* FUNCTION : getRelatedCalibrationData
14294*
14295* DESCRIPTION: Retrieve related system calibration data
14296*
14297* PARAMETERS : None
14298*
14299* RETURN : Pointer of related system calibration data
14300*==========================================================================*/
14301const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14302{
14303 return (const cam_related_system_calibration_data_t *)
14304 &(gCamCapability[mCameraId]->related_cam_calibration);
14305}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014306
14307/*===========================================================================
14308 * FUNCTION : is60HzZone
14309 *
14310 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14311 *
14312 * PARAMETERS : None
14313 *
14314 * RETURN : True if in 60Hz zone, False otherwise
14315 *==========================================================================*/
14316bool QCamera3HardwareInterface::is60HzZone()
14317{
14318 time_t t = time(NULL);
14319 struct tm lt;
14320
14321 struct tm* r = localtime_r(&t, &lt);
14322
14323 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14324 return true;
14325 else
14326 return false;
14327}
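/* Worked examples for the UTC-offset heuristic above (sketch): offsets inside
 * the open interval (-2h, +8h) are treated as 50Hz regions, everything else as
 * 60Hz.
 *
 *     // tm_gmtoff = -5*60*60        (US Eastern)  -> true  (60Hz)
 *     // tm_gmtoff = +5*60*60+30*60  (India)       -> false (50Hz)
 *     // tm_gmtoff = +9*60*60        (Japan/Korea) -> true  (60Hz)
 */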
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014328
14329/*===========================================================================
14330 * FUNCTION : adjustBlackLevelForCFA
14331 *
14332 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14333 * of bayer CFA (Color Filter Array).
14334 *
14335 * PARAMETERS : @input: black level pattern in the order of RGGB
14336 * @output: black level pattern in the order of CFA
14337 * @color_arrangement: CFA color arrangement
14338 *
14339 * RETURN : None
14340 *==========================================================================*/
14341template<typename T>
14342void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14343 T input[BLACK_LEVEL_PATTERN_CNT],
14344 T output[BLACK_LEVEL_PATTERN_CNT],
14345 cam_color_filter_arrangement_t color_arrangement)
14346{
14347 switch (color_arrangement) {
14348 case CAM_FILTER_ARRANGEMENT_GRBG:
14349 output[0] = input[1];
14350 output[1] = input[0];
14351 output[2] = input[3];
14352 output[3] = input[2];
14353 break;
14354 case CAM_FILTER_ARRANGEMENT_GBRG:
14355 output[0] = input[2];
14356 output[1] = input[3];
14357 output[2] = input[0];
14358 output[3] = input[1];
14359 break;
14360 case CAM_FILTER_ARRANGEMENT_BGGR:
14361 output[0] = input[3];
14362 output[1] = input[2];
14363 output[2] = input[1];
14364 output[3] = input[0];
14365 break;
14366 case CAM_FILTER_ARRANGEMENT_RGGB:
14367 output[0] = input[0];
14368 output[1] = input[1];
14369 output[2] = input[2];
14370 output[3] = input[3];
14371 break;
14372 default:
14373 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14374 break;
14375 }
14376}
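/* Worked example for the template above (sketch): remapping an RGGB-ordered
 * black level pattern for a BGGR sensor.
 *
 *     float rggb[BLACK_LEVEL_PATTERN_CNT] = { 64.0f, 65.0f, 66.0f, 67.0f };
 *     float cfa[BLACK_LEVEL_PATTERN_CNT];
 *     adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_BGGR);
 *     // cfa == { 67.0f, 66.0f, 65.0f, 64.0f }  i.e. (B, Gb, Gr, R)
 */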
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014377
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014378void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14379 CameraMetadata &resultMetadata,
14380 std::shared_ptr<metadata_buffer_t> settings)
14381{
14382 if (settings == nullptr) {
14383 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14384 return;
14385 }
14386
14387 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14388 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14389 }
14390
14391 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14392 String8 str((const char *)gps_methods);
14393 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14394 }
14395
14396 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14397 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14398 }
14399
14400 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14401 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14402 }
14403
14404 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14405 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14406 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14407 }
14408
14409 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14410 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14411 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14412 }
14413
14414 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14415 int32_t fwk_thumb_size[2];
14416 fwk_thumb_size[0] = thumb_size->width;
14417 fwk_thumb_size[1] = thumb_size->height;
14418 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14419 }
14420
14421 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14422 uint8_t fwk_intent = intent[0];
14423 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14424 }
14425}
14426
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014427bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14428 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14429 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014430{
14431 if (hdrPlusRequest == nullptr) return false;
14432
14433 // Check noise reduction mode is high quality.
14434 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14435 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14436 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014437 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14438 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014439 return false;
14440 }
14441
14442 // Check edge mode is high quality.
14443 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14444 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14445 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14446 return false;
14447 }
14448
14449 if (request.num_output_buffers != 1 ||
14450 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14451 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014452 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14453 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14454                     request.output_buffers[i].stream->width,
14455                     request.output_buffers[i].stream->height,
14456                     request.output_buffers[i].stream->format);
14457 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014458 return false;
14459 }
14460
14461 // Get a YUV buffer from pic channel.
14462 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14463 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14464 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14465 if (res != OK) {
14466 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14467 __FUNCTION__, strerror(-res), res);
14468 return false;
14469 }
14470
14471 pbcamera::StreamBuffer buffer;
14472 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014473 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014474 buffer.data = yuvBuffer->buffer;
14475 buffer.dataSize = yuvBuffer->frame_len;
14476
14477 pbcamera::CaptureRequest pbRequest;
14478 pbRequest.id = request.frame_number;
14479 pbRequest.outputBuffers.push_back(buffer);
14480
14481 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014482 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014483 if (res != OK) {
14484 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14485 strerror(-res), res);
14486 return false;
14487 }
14488
14489 hdrPlusRequest->yuvBuffer = yuvBuffer;
14490 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14491
14492 return true;
14493}
14494
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014495status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14496{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014497 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14498 return OK;
14499 }
14500
14501 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14502 if (res != OK) {
14503 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14504 strerror(-res), res);
14505 return res;
14506 }
14507 gHdrPlusClientOpening = true;
14508
14509 return OK;
14510}
14511
Chien-Yu Chenee335912017-02-09 17:53:20 -080014512status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14513{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014514 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014515
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014516 // Check if gHdrPlusClient is opened or being opened.
14517 if (gHdrPlusClient == nullptr) {
14518 if (gHdrPlusClientOpening) {
14519 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14520 return OK;
14521 }
14522
14523 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014524 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014525 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14526 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014527 return res;
14528 }
14529
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014530 // When opening HDR+ client completes, HDR+ mode will be enabled.
14531 return OK;
14532
Chien-Yu Chenee335912017-02-09 17:53:20 -080014533 }
14534
14535 // Configure stream for HDR+.
14536 res = configureHdrPlusStreamsLocked();
14537 if (res != OK) {
14538 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014539 return res;
14540 }
14541
14542 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14543 res = gHdrPlusClient->setZslHdrPlusMode(true);
14544 if (res != OK) {
14545 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014546 return res;
14547 }
14548
14549 mHdrPlusModeEnabled = true;
14550 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14551
14552 return OK;
14553}
14554
14555void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14556{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014557 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014558 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014559 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14560 if (res != OK) {
14561 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14562 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014563
14564 // Close HDR+ client so Easel can enter low power mode.
14565 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14566 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014567 }
14568
14569 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014570 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014571 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14572}
14573
14574status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014575{
14576 pbcamera::InputConfiguration inputConfig;
14577 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14578 status_t res = OK;
14579
14580 // Configure HDR+ client streams.
14581 // Get input config.
14582 if (mHdrPlusRawSrcChannel) {
14583 // HDR+ input buffers will be provided by HAL.
14584 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14585 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14586 if (res != OK) {
14587 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14588 __FUNCTION__, strerror(-res), res);
14589 return res;
14590 }
14591
14592 inputConfig.isSensorInput = false;
14593 } else {
14594 // Sensor MIPI will send data to Easel.
14595 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014596 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014597 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14598 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14599 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14600 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14601 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14602 if (mSensorModeInfo.num_raw_bits != 10) {
14603 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14604 mSensorModeInfo.num_raw_bits);
14605 return BAD_VALUE;
14606 }
14607
14608 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014609 }
14610
14611 // Get output configurations.
14612 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014613 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014614
14615 // Easel may need to output YUV output buffers if mPictureChannel was created.
14616 pbcamera::StreamConfiguration yuvOutputConfig;
14617 if (mPictureChannel != nullptr) {
14618 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14619 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14620 if (res != OK) {
14621 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14622 __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

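// Callback invoked when an HDR+ client requested earlier has finished opening asynchronously.
// Takes ownership of the client, pushes the camera's static metadata to it, and enables HDR+
// mode, unless HDR+ was disabled while the client was still being opened.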
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
    }
}

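// Callback invoked when opening an HDR+ client failed; logs the error and clears
// gHdrPlusClientOpening under gHdrPlusClientLock.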
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

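// Callback invoked when the HDR+ client reports an unrecoverable error; moves the HAL into the
// ERROR state and notifies the framework via handleCameraDeviceError().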
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

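// Callback invoked for a successful HDR+ capture result. The single YUV output buffer is
// returned to the picture channel for JPEG encoding, the result metadata (updated with the
// settings of the original still-capture request) is delivered to the framework, and the
// pending HDR+ request is removed. Setting persist.camera.hdrplus.dump_yuv to 1 additionally
// dumps the YUV output to a .ppm file for debugging.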
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                // Defensive check: a result for a request that is no longer pending cannot be
                // handled safely.
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Failed to translate framework metadata to HAL metadata: %s (%d).",
                    __FUNCTION__, strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

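// Callback invoked when an HDR+ capture fails. Returns the YUV buffer to the picture channel,
// reports CAMERA3_MSG_ERROR_BUFFER for every framework buffer pending on that frame number, and
// removes the corresponding pending request from the HAL's bookkeeping.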
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest != mHdrPlusPendingRequests.end()) {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel =
                    (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        } else {
            // Defensive check: if the request is no longer pending, there is no YUV buffer to
            // return.
            ALOGW("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                    failedResult->requestId);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

}; //end namespace qcamera