/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

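// Illustrative sketch (not part of the original HAL): how the batch-mode
// macros above are intended to interact. Batch mode only applies when the
// requested FPS reaches MIN_FPS_FOR_BATCH_MODE; the batch size is assumed
// here to be the HFR rate divided by the preview rate, capped at
// MAX_HFR_BATCH_SIZE. The helper name is hypothetical.
static inline uint32_t exampleBatchSizeForFps(uint32_t fps)
{
    if (fps < MIN_FPS_FOR_BATCH_MODE) {
        return 0; // batching disabled below the HFR threshold
    }
    uint32_t batchSize = fps / PREVIEW_FPS_FOR_HFR;
    return (batchSize > (uint32_t)MAX_HFR_BATCH_SIZE) ?
            (uint32_t)MAX_HFR_BATCH_SIZE : batchSize;
}
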
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

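// Illustrative sketch (hypothetical helper, not part of the original HAL):
// the index macros above describe how one face's data is laid out inside the
// flat int32 arrays used for face rectangles (left, top, right, bottom, plus
// an optional weight slot) and for face landmarks (left eye, right eye,
// mouth), with the landmarks following the same indexed-array pattern.
// A minimal packing example, assuming coordinates are already mapped to the
// active-array coordinate system:
static inline void examplePackFaceRect(int32_t left, int32_t top,
        int32_t right, int32_t bottom, int32_t *faceRect /* >= 4 entries */)
{
    faceRect[FACE_LEFT]   = left;
    faceRect[FACE_TOP]    = top;
    faceRect[FACE_RIGHT]  = right;
    faceRect[FACE_BOTTOM] = bottom;
}
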
// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
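
// Illustrative sketch (hypothetical helper, not part of the original HAL):
// what the ordering comment above implies for HAL-to-framework lookups over
// these tables. The scan runs from index 0 upward and returns the first entry
// whose HAL value matches, so when several framework enums share one HAL value
// (e.g. CAM_AWB_D50 above), the entry listed first wins. The fwk_name/hal_name
// member names are assumed from the QCameraMap entries used above.
template <typename mapType, typename halType, typename fwkType>
static bool exampleLookupFwkValue(const mapType *map, size_t len,
        halType halValue, fwkType *fwkValue)
{
    for (size_t i = 0; i < len; i++) {
        if (map[i].hal_name == halValue) {
            *fwkValue = map[i].fwk_name;
            return true; // first match wins
        }
    }
    return false;
}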

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

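// Illustrative sketch (not part of the original HAL): the ops table above is
// exported through camera3_device_t::ops, and the constructor below stores
// `this` in camera3_device_t::priv. Each static entry point is therefore
// expected to recover the HAL instance roughly like this hypothetical helper
// before forwarding the call to the member implementation.
static inline QCamera3HardwareInterface *exampleHwiFromDevice(
        const struct camera3_device *device)
{
    if (device == NULL) {
        return NULL;
    }
    // priv is set to `this` in the QCamera3HardwareInterface constructor.
    return reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
}
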
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

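// Usage note: callers pass a profiling domain tag and an event name, e.g.
// logEaselEvent("EASEL_STARTUP_LATENCY", "Resume") as done in openCamera()
// below. The timestamped log line is emitted only when gEaselProfilingEnabled
// is set, so the call is effectively free on the normal (non-profiling) path.
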
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : callback function table provided by the camera module
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from frameworks is always full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. We also have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

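// Worked example for the depth checks in validateStreamDimensions() above
// (the sensor numbers are illustrative only): for a hypothetical PDAF meta
// size of 504x384, a RAW16 depth stream must be exactly 504x384, while a
// depth BLOB stream must be (504 * 384 * 2) / 16 = 24192 samples wide and
// 1 high.
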
/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

1358/*===========================================================================
1359 * FUNCTION : validateUsageFlagsForEis
1360 *
1361 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1362 *
1363 * PARAMETERS :
1364 * @stream_list : streams to be configured
1365 *
1366 * RETURN :
1367 * NO_ERROR if the usage flags are supported
1368 * error code if usage flags are not supported
1369 *
1370 *==========================================================================*/
1371int QCamera3HardwareInterface::validateUsageFlagsForEis(
1372 const camera3_stream_configuration_t* streamList)
1373{
1374 for (size_t j = 0; j < streamList->num_streams; j++) {
1375 const camera3_stream_t *newStream = streamList->streams[j];
1376
1377 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1378 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1379
1380 // Because EIS is "hard-coded" for certain use cases, and the current
1381 // implementation doesn't support shared preview and video on the same
1382 // stream, return failure if EIS is forced on.
1383 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1384 LOGE("Combined video and preview usage flag is not supported due to EIS");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
Thierry Strudel3d639192016-09-09 11:52:26 -07001391/*==============================================================================
1392 * FUNCTION : isSupportChannelNeeded
1393 *
1394 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 * @stream_config_info : the config info for streams to be configured
1399 *
1400 * RETURN : Boolean true/false decision
1401 *
1402 *==========================================================================*/
1403bool QCamera3HardwareInterface::isSupportChannelNeeded(
1404 camera3_stream_configuration_t *streamList,
1405 cam_stream_size_info_t stream_config_info)
1406{
1407 uint32_t i;
1408 bool pprocRequested = false;
1409 /* Check for conditions where PProc pipeline does not have any streams */
1410 for (i = 0; i < stream_config_info.num_streams; i++) {
1411 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1412 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1413 pprocRequested = true;
1414 break;
1415 }
1416 }
1417
1418 if (pprocRequested == false )
1419 return true;
1420
1421 /* Dummy stream is needed if only raw or JPEG streams are present */
1422 for (i = 0; i < streamList->num_streams; i++) {
1423 switch(streamList->streams[i]->format) {
1424 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1425 case HAL_PIXEL_FORMAT_RAW10:
1426 case HAL_PIXEL_FORMAT_RAW16:
1427 case HAL_PIXEL_FORMAT_BLOB:
1428 break;
1429 default:
1430 return false;
1431 }
1432 }
1433 return true;
1434}
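
/* Illustrative example (reasoning only, not executed): for a configuration
 * containing just a RAW16 stream and a BLOB (JPEG) stream, the format loop
 * above finds no processed stream, so this function returns true and the
 * caller is expected to bring up the dummy support channel that keeps the
 * pipeline streaming. If the configuration also contains a processed stream
 * (e.g. an IMPLEMENTATION_DEFINED preview) with a non-empty postprocess
 * mask, the function returns false instead.
 */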
1435
1436/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001437 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001438 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001439 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001440 *
1441 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001442 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001443 *
1444 * RETURN : int32_t type of status
1445 * NO_ERROR -- success
1446 * non-zero failure code
1447 *
1448 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001449int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001450{
1451 int32_t rc = NO_ERROR;
1452
1453 cam_dimension_t max_dim = {0, 0};
1454 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1455 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1456 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1457 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1458 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1459 }
1460
1461 clear_metadata_buffer(mParameters);
1462
1463 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1464 max_dim);
1465 if (rc != NO_ERROR) {
1466 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1467 return rc;
1468 }
1469
1470 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1471 if (rc != NO_ERROR) {
1472 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1473 return rc;
1474 }
1475
1476 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001477 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001478
1479 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1480 mParameters);
1481 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001483 return rc;
1484 }
1485
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001487 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1488 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1489 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1490 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1491 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001492
1493 return rc;
1494}
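
/* Minimal usage sketch (illustrative): the selected sensor mode is typically
 * queried after the stream configuration has been pushed to the backend,
 * e.g. to size RAW buffers or derive timing from the output pixel clock:
 *
 *   cam_sensor_mode_info_t sensorModeInfo = {};
 *   if (getSensorModeInfo(sensorModeInfo) == NO_ERROR) {
 *       // sensorModeInfo.active_array_size, .pixel_array_size,
 *       // .op_pixel_clk and .num_raw_bits are now populated.
 *   }
 */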
1495
1496/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 * FUNCTION : addToPPFeatureMask
1498 *
1499 * DESCRIPTION: add additional features to pp feature mask based on
1500 * stream type and use case
1501 *
1502 * PARAMETERS :
1503 * @stream_format : stream format used to select additional features
1504 * @stream_idx : stream idx within postprocess_mask list to change
1505 *
1506 * RETURN : None
1507 *
1508 *==========================================================================*/
1509void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1510 uint32_t stream_idx)
1511{
1512 char feature_mask_value[PROPERTY_VALUE_MAX];
1513 cam_feature_mask_t feature_mask;
1514 int args_converted;
1515 int property_len;
1516
1517 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001518#ifdef _LE_CAMERA_
1519 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1520 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1521 property_len = property_get("persist.camera.hal3.feature",
1522 feature_mask_value, swtnr_feature_mask_value);
1523#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001524 property_len = property_get("persist.camera.hal3.feature",
1525 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001526#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1528 (feature_mask_value[1] == 'x')) {
1529 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1530 } else {
1531 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1532 }
1533 if (1 != args_converted) {
1534 feature_mask = 0;
1535 LOGE("Wrong feature mask %s", feature_mask_value);
1536 return;
1537 }
1538
1539 switch (stream_format) {
1540 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1541 /* Add SW TNR or LLVD to pp feature mask only if video hint is enabled */
1542 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1543 mStreamConfigInfo.postprocess_mask[stream_idx]
1544 |= CAM_QTI_FEATURE_SW_TNR;
1545 LOGH("Added SW TNR to pp feature mask");
1546 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1547 mStreamConfigInfo.postprocess_mask[stream_idx]
1548 |= CAM_QCOM_FEATURE_LLVD;
1549 LOGH("Added LLVD SeeMore to pp feature mask");
1550 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001551 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1552 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1553 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1554 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001555 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1556 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1557 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1558 CAM_QTI_FEATURE_BINNING_CORRECTION;
1559 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001560 break;
1561 }
1562 default:
1563 break;
1564 }
1565 LOGD("PP feature mask %llx",
1566 mStreamConfigInfo.postprocess_mask[stream_idx]);
1567}
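
/* Illustrative tuning note (the mask value is a placeholder, not a verified
 * bit position): the override mask is read from persist.camera.hal3.feature
 * and accepts hex ("0x...") or decimal, e.g.
 *
 *   adb shell setprop persist.camera.hal3.feature 0x<mask>
 *
 * <mask> would need to contain the CAM_QTI_FEATURE_SW_TNR or
 * CAM_QCOM_FEATURE_LLVD bit (as defined in the camera interface headers) for
 * the corresponding feature to be OR-ed into the stream's postprocess_mask
 * when a video stream is configured.
 */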
1568
1569/*==============================================================================
1570 * FUNCTION : updateFpsInPreviewBuffer
1571 *
1572 * DESCRIPTION: update FPS information in preview buffer.
1573 *
1574 * PARAMETERS :
1575 * @metadata : pointer to metadata buffer
1576 * @frame_number: frame_number to look for in pending buffer list
1577 *
1578 * RETURN : None
1579 *
1580 *==========================================================================*/
1581void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1582 uint32_t frame_number)
1583{
1584 // Mark all pending buffers for this particular request
1585 // with corresponding framerate information
1586 for (List<PendingBuffersInRequest>::iterator req =
1587 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1588 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1589 for(List<PendingBufferInfo>::iterator j =
1590 req->mPendingBufferList.begin();
1591 j != req->mPendingBufferList.end(); j++) {
1592 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1593 if ((req->frame_number == frame_number) &&
1594 (channel->getStreamTypeMask() &
1595 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1596 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1597 CAM_INTF_PARM_FPS_RANGE, metadata) {
1598 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1599 struct private_handle_t *priv_handle =
1600 (struct private_handle_t *)(*(j->buffer));
1601 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1602 }
1603 }
1604 }
1605 }
1606}
1607
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001608/*==============================================================================
1609 * FUNCTION : updateTimeStampInPendingBuffers
1610 *
1611 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1612 * of a frame number
1613 *
1614 * PARAMETERS :
1615 * @frameNumber : frame number whose pending buffers will receive the timestamp
1616 * @timestamp : timestamp to be set
1617 *
1618 * RETURN : None
1619 *
1620 *==========================================================================*/
1621void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1622 uint32_t frameNumber, nsecs_t timestamp)
1623{
1624 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1625 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1626 if (req->frame_number != frameNumber)
1627 continue;
1628
1629 for (auto k = req->mPendingBufferList.begin();
1630 k != req->mPendingBufferList.end(); k++ ) {
1631 struct private_handle_t *priv_handle =
1632 (struct private_handle_t *) (*(k->buffer));
1633 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1634 }
1635 }
1636 return;
1637}
1638
Thierry Strudel3d639192016-09-09 11:52:26 -07001639/*===========================================================================
1640 * FUNCTION : configureStreams
1641 *
1642 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1643 * and output streams.
1644 *
1645 * PARAMETERS :
1646 * @stream_list : streams to be configured
1647 *
1648 * RETURN :
1649 *
1650 *==========================================================================*/
1651int QCamera3HardwareInterface::configureStreams(
1652 camera3_stream_configuration_t *streamList)
1653{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001654 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001655 int rc = 0;
1656
1657 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001658 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001659 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001660 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001661
1662 return rc;
1663}
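
/* Caller-side sketch (illustrative only; the camera framework normally
 * builds this structure, and the JPEG size shown is hypothetical and must
 * match an advertised picture size): a basic preview + JPEG configuration
 * as it arrives here through the camera3_device_ops configure_streams hook.
 *
 *   camera3_stream_t preview = {};
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   preview.width       = 1920;
 *   preview.height      = 1080;
 *
 *   camera3_stream_t jpeg = {};
 *   jpeg.stream_type = CAMERA3_STREAM_OUTPUT;
 *   jpeg.format      = HAL_PIXEL_FORMAT_BLOB;
 *   jpeg.width       = 4032;
 *   jpeg.height      = 3024;
 *
 *   camera3_stream_t *streams[] = { &preview, &jpeg };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams    = 2;
 *   config.streams        = streams;
 *   config.operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
 */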
1664
1665/*===========================================================================
1666 * FUNCTION : configureStreamsPerfLocked
1667 *
1668 * DESCRIPTION: configureStreams while perfLock is held.
1669 *
1670 * PARAMETERS :
1671 * @stream_list : streams to be configured
1672 *
1673 * RETURN : int32_t type of status
1674 * NO_ERROR -- success
1675 * non-zero failure code
1676 *==========================================================================*/
1677int QCamera3HardwareInterface::configureStreamsPerfLocked(
1678 camera3_stream_configuration_t *streamList)
1679{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001680 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001681 int rc = 0;
1682
1683 // Sanity check stream_list
1684 if (streamList == NULL) {
1685 LOGE("NULL stream configuration");
1686 return BAD_VALUE;
1687 }
1688 if (streamList->streams == NULL) {
1689 LOGE("NULL stream list");
1690 return BAD_VALUE;
1691 }
1692
1693 if (streamList->num_streams < 1) {
1694 LOGE("Bad number of streams requested: %d",
1695 streamList->num_streams);
1696 return BAD_VALUE;
1697 }
1698
1699 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1700 LOGE("Maximum number of streams %d exceeded: %d",
1701 MAX_NUM_STREAMS, streamList->num_streams);
1702 return BAD_VALUE;
1703 }
1704
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001705 rc = validateUsageFlags(streamList);
1706 if (rc != NO_ERROR) {
1707 return rc;
1708 }
1709
Thierry Strudel3d639192016-09-09 11:52:26 -07001710 mOpMode = streamList->operation_mode;
1711 LOGD("mOpMode: %d", mOpMode);
1712
1713 /* first invalidate all the streams in mStreamInfo;
1714 * if they appear again, they will be validated */
1715 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1716 it != mStreamInfo.end(); it++) {
1717 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1718 if (channel) {
1719 channel->stop();
1720 }
1721 (*it)->status = INVALID;
1722 }
1723
1724 if (mRawDumpChannel) {
1725 mRawDumpChannel->stop();
1726 delete mRawDumpChannel;
1727 mRawDumpChannel = NULL;
1728 }
1729
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001730 if (mHdrPlusRawSrcChannel) {
1731 mHdrPlusRawSrcChannel->stop();
1732 delete mHdrPlusRawSrcChannel;
1733 mHdrPlusRawSrcChannel = NULL;
1734 }
1735
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 if (mSupportChannel)
1737 mSupportChannel->stop();
1738
1739 if (mAnalysisChannel) {
1740 mAnalysisChannel->stop();
1741 }
1742 if (mMetadataChannel) {
1743 /* If mStreamInfo is not empty, there is a metadata stream */
1744 mMetadataChannel->stop();
1745 }
1746 if (mChannelHandle) {
1747 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1748 mChannelHandle);
1749 LOGD("stopping channel %d", mChannelHandle);
1750 }
1751
1752 pthread_mutex_lock(&mMutex);
1753
1754 // Check state
1755 switch (mState) {
1756 case INITIALIZED:
1757 case CONFIGURED:
1758 case STARTED:
1759 /* valid state */
1760 break;
1761 default:
1762 LOGE("Invalid state %d", mState);
1763 pthread_mutex_unlock(&mMutex);
1764 return -ENODEV;
1765 }
1766
1767 /* Check whether we have video stream */
1768 m_bIs4KVideo = false;
1769 m_bIsVideo = false;
1770 m_bEisSupportedSize = false;
1771 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001772 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001773 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001774 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001775 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001776 uint32_t videoWidth = 0U;
1777 uint32_t videoHeight = 0U;
1778 size_t rawStreamCnt = 0;
1779 size_t stallStreamCnt = 0;
1780 size_t processedStreamCnt = 0;
1781 // Number of streams on ISP encoder path
1782 size_t numStreamsOnEncoder = 0;
1783 size_t numYuv888OnEncoder = 0;
1784 bool bYuv888OverrideJpeg = false;
1785 cam_dimension_t largeYuv888Size = {0, 0};
1786 cam_dimension_t maxViewfinderSize = {0, 0};
1787 bool bJpegExceeds4K = false;
1788 bool bJpegOnEncoder = false;
1789 bool bUseCommonFeatureMask = false;
1790 cam_feature_mask_t commonFeatureMask = 0;
1791 bool bSmallJpegSize = false;
1792 uint32_t width_ratio;
1793 uint32_t height_ratio;
1794 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1795 camera3_stream_t *inputStream = NULL;
1796 bool isJpeg = false;
1797 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001798 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001799 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001800
1801 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1802
1803 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001804 uint8_t eis_prop_set;
1805 uint32_t maxEisWidth = 0;
1806 uint32_t maxEisHeight = 0;
1807
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001808 // Initialize all instant AEC related variables
1809 mInstantAEC = false;
1810 mResetInstantAEC = false;
1811 mInstantAECSettledFrameNumber = 0;
1812 mAecSkipDisplayFrameBound = 0;
1813 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001814 mCurrFeatureState = 0;
1815 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1818
1819 size_t count = IS_TYPE_MAX;
1820 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1821 for (size_t i = 0; i < count; i++) {
1822 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001823 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1824 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001825 break;
1826 }
1827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001828
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001829 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 maxEisWidth = MAX_EIS_WIDTH;
1831 maxEisHeight = MAX_EIS_HEIGHT;
1832 }
1833
1834 /* EIS setprop control */
1835 char eis_prop[PROPERTY_VALUE_MAX];
1836 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001837 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001838 eis_prop_set = (uint8_t)atoi(eis_prop);
1839
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001840 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1842
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001843 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1844 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001845
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 /* stream configurations */
1847 for (size_t i = 0; i < streamList->num_streams; i++) {
1848 camera3_stream_t *newStream = streamList->streams[i];
1849 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1850 "height = %d, rotation = %d, usage = 0x%x",
1851 i, newStream->stream_type, newStream->format,
1852 newStream->width, newStream->height, newStream->rotation,
1853 newStream->usage);
1854 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1855 newStream->stream_type == CAMERA3_STREAM_INPUT){
1856 isZsl = true;
1857 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001858 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1859 IS_USAGE_PREVIEW(newStream->usage)) {
1860 isPreview = true;
1861 }
1862
Thierry Strudel3d639192016-09-09 11:52:26 -07001863 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1864 inputStream = newStream;
1865 }
1866
Emilian Peev7650c122017-01-19 08:24:33 -08001867 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1868 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001869 isJpeg = true;
1870 jpegSize.width = newStream->width;
1871 jpegSize.height = newStream->height;
1872 if (newStream->width > VIDEO_4K_WIDTH ||
1873 newStream->height > VIDEO_4K_HEIGHT)
1874 bJpegExceeds4K = true;
1875 }
1876
1877 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1878 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1879 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001880 // In HAL3 we can have multiple different video streams.
1881 // The variables video width and height are used below as
1882 // dimensions of the biggest of them
1883 if (videoWidth < newStream->width ||
1884 videoHeight < newStream->height) {
1885 videoWidth = newStream->width;
1886 videoHeight = newStream->height;
1887 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001888 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1889 (VIDEO_4K_HEIGHT <= newStream->height)) {
1890 m_bIs4KVideo = true;
1891 }
1892 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1893 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001894
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 }
1896 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1897 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1898 switch (newStream->format) {
1899 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001900 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1901 depthPresent = true;
1902 break;
1903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 stallStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 numStreamsOnEncoder++;
1908 bJpegOnEncoder = true;
1909 }
1910 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1911 newStream->width);
1912 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1913 newStream->height);
1914 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1915 "FATAL: max_downscale_factor cannot be zero and so assert");
1916 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1917 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1918 LOGH("Setting small jpeg size flag to true");
1919 bSmallJpegSize = true;
1920 }
1921 break;
1922 case HAL_PIXEL_FORMAT_RAW10:
1923 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1924 case HAL_PIXEL_FORMAT_RAW16:
1925 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001926 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1927 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1928 pdStatCount++;
1929 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001930 break;
1931 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1932 processedStreamCnt++;
1933 if (isOnEncoder(maxViewfinderSize, newStream->width,
1934 newStream->height)) {
1935 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1936 !IS_USAGE_ZSL(newStream->usage)) {
1937 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1938 }
1939 numStreamsOnEncoder++;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1943 processedStreamCnt++;
1944 if (isOnEncoder(maxViewfinderSize, newStream->width,
1945 newStream->height)) {
1946 // If Yuv888 size is not greater than 4K, set feature mask
1947 // to SUPERSET so that it supports concurrent requests on
1948 // YUV and JPEG.
1949 if (newStream->width <= VIDEO_4K_WIDTH &&
1950 newStream->height <= VIDEO_4K_HEIGHT) {
1951 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1952 }
1953 numStreamsOnEncoder++;
1954 numYuv888OnEncoder++;
1955 largeYuv888Size.width = newStream->width;
1956 largeYuv888Size.height = newStream->height;
1957 }
1958 break;
1959 default:
1960 processedStreamCnt++;
1961 if (isOnEncoder(maxViewfinderSize, newStream->width,
1962 newStream->height)) {
1963 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1964 numStreamsOnEncoder++;
1965 }
1966 break;
1967 }
1968
1969 }
1970 }
1971
1972 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1973 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1974 !m_bIsVideo) {
1975 m_bEisEnable = false;
1976 }
1977
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001978 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1979 pthread_mutex_unlock(&mMutex);
1980 return -EINVAL;
1981 }
1982
Thierry Strudel54dc9782017-02-15 12:12:10 -08001983 uint8_t forceEnableTnr = 0;
1984 char tnr_prop[PROPERTY_VALUE_MAX];
1985 memset(tnr_prop, 0, sizeof(tnr_prop));
1986 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1987 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1988
Thierry Strudel3d639192016-09-09 11:52:26 -07001989 /* Logic to enable/disable TNR based on specific config size/etc.*/
1990 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001991 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1992 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001993 else if (forceEnableTnr)
1994 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001995
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001996 char videoHdrProp[PROPERTY_VALUE_MAX];
1997 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1998 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1999 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2000
2001 if (hdr_mode_prop == 1 && m_bIsVideo &&
2002 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2003 m_bVideoHdrEnabled = true;
2004 else
2005 m_bVideoHdrEnabled = false;
2006
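/* Debug note (illustrative property usage; the defaults are read in the code
 * above):
 *   adb shell setprop debug.camera.tnr.forceenable 1   # force TNR on
 *   adb shell setprop persist.camera.hdr.video 1       # request video HDR
 * Video HDR additionally requires a video stream and a non-HFR operation
 * mode before m_bVideoHdrEnabled is set.
 */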
2007
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 /* Check if num_streams is sane */
2009 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2010 rawStreamCnt > MAX_RAW_STREAMS ||
2011 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2012 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2013 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2014 pthread_mutex_unlock(&mMutex);
2015 return -EINVAL;
2016 }
2017 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002018 if (isZsl && m_bIs4KVideo) {
2019 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002020 pthread_mutex_unlock(&mMutex);
2021 return -EINVAL;
2022 }
2023 /* Check if stream sizes are sane */
2024 if (numStreamsOnEncoder > 2) {
2025 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2026 pthread_mutex_unlock(&mMutex);
2027 return -EINVAL;
2028 } else if (1 < numStreamsOnEncoder){
2029 bUseCommonFeatureMask = true;
2030 LOGH("Multiple streams above max viewfinder size, common mask needed");
2031 }
2032
2033 /* Check if BLOB size is greater than 4k in 4k recording case */
2034 if (m_bIs4KVideo && bJpegExceeds4K) {
2035 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2036 pthread_mutex_unlock(&mMutex);
2037 return -EINVAL;
2038 }
2039
Emilian Peev7650c122017-01-19 08:24:33 -08002040 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2041 depthPresent) {
2042 LOGE("HAL doesn't support depth streams in HFR mode!");
2043 pthread_mutex_unlock(&mMutex);
2044 return -EINVAL;
2045 }
2046
Thierry Strudel3d639192016-09-09 11:52:26 -07002047 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2048 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2049 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2050 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2051 // configurations:
2052 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2053 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2054 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2055 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2056 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2057 __func__);
2058 pthread_mutex_unlock(&mMutex);
2059 return -EINVAL;
2060 }
2061
2062 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2063 // the YUV stream's size is greater or equal to the JPEG size, set common
2064 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2065 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2066 jpegSize.width, jpegSize.height) &&
2067 largeYuv888Size.width > jpegSize.width &&
2068 largeYuv888Size.height > jpegSize.height) {
2069 bYuv888OverrideJpeg = true;
2070 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2071 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 }
2073
2074 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2075 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2076 commonFeatureMask);
2077 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2078 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2079
2080 rc = validateStreamDimensions(streamList);
2081 if (rc == NO_ERROR) {
2082 rc = validateStreamRotations(streamList);
2083 }
2084 if (rc != NO_ERROR) {
2085 LOGE("Invalid stream configuration requested!");
2086 pthread_mutex_unlock(&mMutex);
2087 return rc;
2088 }
2089
Emilian Peev0f3c3162017-03-15 12:57:46 +00002090 if (1 < pdStatCount) {
2091 LOGE("HAL doesn't support multiple PD streams");
2092 pthread_mutex_unlock(&mMutex);
2093 return -EINVAL;
2094 }
2095
2096 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2097 (1 == pdStatCount)) {
2098 LOGE("HAL doesn't support PD streams in HFR mode!");
2099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2104 for (size_t i = 0; i < streamList->num_streams; i++) {
2105 camera3_stream_t *newStream = streamList->streams[i];
2106 LOGH("newStream type = %d, stream format = %d "
2107 "stream size : %d x %d, stream rotation = %d",
2108 newStream->stream_type, newStream->format,
2109 newStream->width, newStream->height, newStream->rotation);
2110 // if the stream is already in mStreamInfo, validate it
2111 bool stream_exists = false;
2112 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2113 it != mStreamInfo.end(); it++) {
2114 if ((*it)->stream == newStream) {
2115 QCamera3ProcessingChannel *channel =
2116 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2117 stream_exists = true;
2118 if (channel)
2119 delete channel;
2120 (*it)->status = VALID;
2121 (*it)->stream->priv = NULL;
2122 (*it)->channel = NULL;
2123 }
2124 }
2125 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2126 //new stream
2127 stream_info_t* stream_info;
2128 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2129 if (!stream_info) {
2130 LOGE("Could not allocate stream info");
2131 rc = -ENOMEM;
2132 pthread_mutex_unlock(&mMutex);
2133 return rc;
2134 }
2135 stream_info->stream = newStream;
2136 stream_info->status = VALID;
2137 stream_info->channel = NULL;
2138 mStreamInfo.push_back(stream_info);
2139 }
2140 /* Covers Opaque ZSL and API1 F/W ZSL */
2141 if (IS_USAGE_ZSL(newStream->usage)
2142 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2143 if (zslStream != NULL) {
2144 LOGE("Multiple input/reprocess streams requested!");
2145 pthread_mutex_unlock(&mMutex);
2146 return BAD_VALUE;
2147 }
2148 zslStream = newStream;
2149 }
2150 /* Covers YUV reprocess */
2151 if (inputStream != NULL) {
2152 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2153 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->width == newStream->width
2156 && inputStream->height == newStream->height) {
2157 if (zslStream != NULL) {
2158 /* This scenario indicates that multiple YUV streams with the same size
2159 * as the input stream have been requested. Since the zsl stream handle
2160 * is solely used for overriding the size of streams that share h/w
2161 * streams, we will just make a guess here as to which of the streams
2162 * is a ZSL stream. This will be refactored once we have generic logic
2163 * for streams sharing encoder output.
2164 */
2165 LOGH("Warning, Multiple ip/reprocess streams requested!");
2166 }
2167 zslStream = newStream;
2168 }
2169 }
2170 }
2171
2172 /* If a zsl stream is set, we know that we have configured at least one input or
2173 bidirectional stream */
2174 if (NULL != zslStream) {
2175 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2176 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2177 mInputStreamInfo.format = zslStream->format;
2178 mInputStreamInfo.usage = zslStream->usage;
2179 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2180 mInputStreamInfo.dim.width,
2181 mInputStreamInfo.dim.height,
2182 mInputStreamInfo.format, mInputStreamInfo.usage);
2183 }
2184
2185 cleanAndSortStreamInfo();
2186 if (mMetadataChannel) {
2187 delete mMetadataChannel;
2188 mMetadataChannel = NULL;
2189 }
2190 if (mSupportChannel) {
2191 delete mSupportChannel;
2192 mSupportChannel = NULL;
2193 }
2194
2195 if (mAnalysisChannel) {
2196 delete mAnalysisChannel;
2197 mAnalysisChannel = NULL;
2198 }
2199
2200 if (mDummyBatchChannel) {
2201 delete mDummyBatchChannel;
2202 mDummyBatchChannel = NULL;
2203 }
2204
Emilian Peev7650c122017-01-19 08:24:33 -08002205 if (mDepthChannel) {
2206 mDepthChannel = NULL;
2207 }
2208
Thierry Strudel2896d122017-02-23 19:18:03 -08002209 char is_type_value[PROPERTY_VALUE_MAX];
2210 property_get("persist.camera.is_type", is_type_value, "4");
2211 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2212
Binhao Line406f062017-05-03 14:39:44 -07002213 char property_value[PROPERTY_VALUE_MAX];
2214 property_get("persist.camera.gzoom.at", property_value, "0");
2215 int goog_zoom_at = atoi(property_value);
2216 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2217 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2218
2219 property_get("persist.camera.gzoom.4k", property_value, "0");
2220 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
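
/* Illustrative property usage (the bit meanings follow directly from the
 * checks above): persist.camera.gzoom.at is a bitmask, bit 0 enabling Google
 * zoom on the video stream and bit 1 on the preview stream, e.g.
 *   adb shell setprop persist.camera.gzoom.at 3   # video + preview
 *   adb shell setprop persist.camera.gzoom.4k 1   # also allow on 4K video
 * When the flag is set for a stream, CAM_QCOM_FEATURE_GOOG_ZOOM is added to
 * its postprocess mask and UBWC is disabled on that stream further below.
 */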
2221
Thierry Strudel3d639192016-09-09 11:52:26 -07002222 //Create metadata channel and initialize it
2223 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2224 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2225 gCamCapability[mCameraId]->color_arrangement);
2226 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2227 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002228 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 if (mMetadataChannel == NULL) {
2230 LOGE("failed to allocate metadata channel");
2231 rc = -ENOMEM;
2232 pthread_mutex_unlock(&mMutex);
2233 return rc;
2234 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002235 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002236 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2237 if (rc < 0) {
2238 LOGE("metadata channel initialization failed");
2239 delete mMetadataChannel;
2240 mMetadataChannel = NULL;
2241 pthread_mutex_unlock(&mMutex);
2242 return rc;
2243 }
2244
Thierry Strudel2896d122017-02-23 19:18:03 -08002245 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002246 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002247 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002248 // Keep track of preview/video streams indices.
2249 // There could be more than one preview streams, but only one video stream.
2250 int32_t video_stream_idx = -1;
2251 int32_t preview_stream_idx[streamList->num_streams];
2252 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002253 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2254 /* Allocate channel objects for the requested streams */
2255 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002256
Thierry Strudel3d639192016-09-09 11:52:26 -07002257 camera3_stream_t *newStream = streamList->streams[i];
2258 uint32_t stream_usage = newStream->usage;
2259 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2260 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2261 struct camera_info *p_info = NULL;
2262 pthread_mutex_lock(&gCamLock);
2263 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2264 pthread_mutex_unlock(&gCamLock);
2265 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2266 || IS_USAGE_ZSL(newStream->usage)) &&
2267 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002270 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2271 if (bUseCommonFeatureMask)
2272 zsl_ppmask = commonFeatureMask;
2273 else
2274 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002275 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002276 if (numStreamsOnEncoder > 0)
2277 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2278 else
2279 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002281 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002282 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002284 LOGH("Input stream configured, reprocess config");
2285 } else {
2286 //for non zsl streams find out the format
2287 switch (newStream->format) {
2288 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2289 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002290 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2292 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2293 /* add additional features to pp feature mask */
2294 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2295 mStreamConfigInfo.num_streams);
2296
2297 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2298 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2299 CAM_STREAM_TYPE_VIDEO;
2300 if (m_bTnrEnabled && m_bTnrVideo) {
2301 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2302 CAM_QCOM_FEATURE_CPP_TNR;
2303 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2304 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2305 ~CAM_QCOM_FEATURE_CDS;
2306 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002307 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2308 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2309 CAM_QTI_FEATURE_PPEISCORE;
2310 }
Binhao Line406f062017-05-03 14:39:44 -07002311 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2313 CAM_QCOM_FEATURE_GOOG_ZOOM;
2314 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002315 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 } else {
2317 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2318 CAM_STREAM_TYPE_PREVIEW;
2319 if (m_bTnrEnabled && m_bTnrPreview) {
2320 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2321 CAM_QCOM_FEATURE_CPP_TNR;
2322 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2323 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2324 ~CAM_QCOM_FEATURE_CDS;
2325 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002326 if(!m_bSwTnrPreview) {
2327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2328 ~CAM_QTI_FEATURE_SW_TNR;
2329 }
Binhao Line406f062017-05-03 14:39:44 -07002330 if (is_goog_zoom_preview_enabled) {
2331 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2332 CAM_QCOM_FEATURE_GOOG_ZOOM;
2333 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002334 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002335 padding_info.width_padding = mSurfaceStridePadding;
2336 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002337 previewSize.width = (int32_t)newStream->width;
2338 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002339 }
2340 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2341 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2342 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2343 newStream->height;
2344 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2345 newStream->width;
2346 }
2347 }
2348 break;
2349 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002350 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2352 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2353 if (bUseCommonFeatureMask)
2354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2355 commonFeatureMask;
2356 else
2357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2358 CAM_QCOM_FEATURE_NONE;
2359 } else {
2360 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2361 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2362 }
2363 break;
2364 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002365 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002366 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2367 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2368 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2369 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2370 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002371 /* Remove rotation if it is not supported
2372 for 4K LiveVideo snapshot case (online processing) */
2373 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2374 CAM_QCOM_FEATURE_ROTATION)) {
2375 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2376 &= ~CAM_QCOM_FEATURE_ROTATION;
2377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 } else {
2379 if (bUseCommonFeatureMask &&
2380 isOnEncoder(maxViewfinderSize, newStream->width,
2381 newStream->height)) {
2382 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2383 } else {
2384 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2385 }
2386 }
2387 if (isZsl) {
2388 if (zslStream) {
2389 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2390 (int32_t)zslStream->width;
2391 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2392 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002393 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2394 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 } else {
2396 LOGE("Error, No ZSL stream identified");
2397 pthread_mutex_unlock(&mMutex);
2398 return -EINVAL;
2399 }
2400 } else if (m_bIs4KVideo) {
2401 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2402 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2403 } else if (bYuv888OverrideJpeg) {
2404 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2405 (int32_t)largeYuv888Size.width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2407 (int32_t)largeYuv888Size.height;
2408 }
2409 break;
2410 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2411 case HAL_PIXEL_FORMAT_RAW16:
2412 case HAL_PIXEL_FORMAT_RAW10:
2413 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2414 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2415 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002416 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2417 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2418 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2419 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2420 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2421 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2422 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2423 gCamCapability[mCameraId]->dt[mPDIndex];
2424 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2425 gCamCapability[mCameraId]->vc[mPDIndex];
2426 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002427 break;
2428 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2431 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2432 break;
2433 }
2434 }
2435
2436 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2437 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2438 gCamCapability[mCameraId]->color_arrangement);
2439
2440 if (newStream->priv == NULL) {
2441 //New stream, construct channel
2442 switch (newStream->stream_type) {
2443 case CAMERA3_STREAM_INPUT:
2444 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2445 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2446 break;
2447 case CAMERA3_STREAM_BIDIRECTIONAL:
2448 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2449 GRALLOC_USAGE_HW_CAMERA_WRITE;
2450 break;
2451 case CAMERA3_STREAM_OUTPUT:
2452 /* For video encoding streams, set the read/write rarely
2453 * flags so that the buffers may be allocated un-cached */
2454 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2455 newStream->usage |=
2456 (GRALLOC_USAGE_SW_READ_RARELY |
2457 GRALLOC_USAGE_SW_WRITE_RARELY |
2458 GRALLOC_USAGE_HW_CAMERA_WRITE);
2459 else if (IS_USAGE_ZSL(newStream->usage))
2460 {
2461 LOGD("ZSL usage flag skipping");
2462 }
2463 else if (newStream == zslStream
2464 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2465 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2466 } else
2467 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2468 break;
2469 default:
2470 LOGE("Invalid stream_type %d", newStream->stream_type);
2471 break;
2472 }
2473
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002474 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002475 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2476 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2477 QCamera3ProcessingChannel *channel = NULL;
2478 switch (newStream->format) {
2479 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2480 if ((newStream->usage &
2481 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2482 (streamList->operation_mode ==
2483 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2484 ) {
2485 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2486 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002487 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002488 this,
2489 newStream,
2490 (cam_stream_type_t)
2491 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2492 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2493 mMetadataChannel,
2494 0); //heap buffers are not required for HFR video channel
2495 if (channel == NULL) {
2496 LOGE("allocation of channel failed");
2497 pthread_mutex_unlock(&mMutex);
2498 return -ENOMEM;
2499 }
2500 //channel->getNumBuffers() will return 0 here so use
2501 //MAX_INFLIGHT_HFR_REQUESTS
2502 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2503 newStream->priv = channel;
2504 LOGI("num video buffers in HFR mode: %d",
2505 MAX_INFLIGHT_HFR_REQUESTS);
2506 } else {
2507 /* Copy stream contents in the HFR preview-only case to create
2508 * a dummy batch channel so that sensor streaming is in
2509 * HFR mode */
2510 if (!m_bIsVideo && (streamList->operation_mode ==
2511 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2512 mDummyBatchStream = *newStream;
2513 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002514 int bufferCount = MAX_INFLIGHT_REQUESTS;
2515 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2516 CAM_STREAM_TYPE_VIDEO) {
2517 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2518 bufferCount = MAX_VIDEO_BUFFERS;
2519 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002520 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2521 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002522 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002523 this,
2524 newStream,
2525 (cam_stream_type_t)
2526 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2527 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2528 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002529 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002530 if (channel == NULL) {
2531 LOGE("allocation of channel failed");
2532 pthread_mutex_unlock(&mMutex);
2533 return -ENOMEM;
2534 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002535 /* disable UBWC for preview, though supported,
2536 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002537 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002538 (previewSize.width == (int32_t)videoWidth)&&
2539 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002540 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002541 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002542 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002543 /* When goog_zoom is linked to the preview or video stream,
2544 * disable ubwc to the linked stream */
2545 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2546 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2547 channel->setUBWCEnabled(false);
2548 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002549 newStream->max_buffers = channel->getNumBuffers();
2550 newStream->priv = channel;
2551 }
2552 break;
2553 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2554 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2555 mChannelHandle,
2556 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002557 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002558 this,
2559 newStream,
2560 (cam_stream_type_t)
2561 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2562 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2563 mMetadataChannel);
2564 if (channel == NULL) {
2565 LOGE("allocation of YUV channel failed");
2566 pthread_mutex_unlock(&mMutex);
2567 return -ENOMEM;
2568 }
2569 newStream->max_buffers = channel->getNumBuffers();
2570 newStream->priv = channel;
2571 break;
2572 }
2573 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2574 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002575 case HAL_PIXEL_FORMAT_RAW10: {
2576 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2577 (HAL_DATASPACE_DEPTH != newStream->data_space))
2578 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002579 mRawChannel = new QCamera3RawChannel(
2580 mCameraHandle->camera_handle, mChannelHandle,
2581 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002582 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002583 this, newStream,
2584 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002585 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002586 if (mRawChannel == NULL) {
2587 LOGE("allocation of raw channel failed");
2588 pthread_mutex_unlock(&mMutex);
2589 return -ENOMEM;
2590 }
2591 newStream->max_buffers = mRawChannel->getNumBuffers();
2592 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2593 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002594 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002596 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2597 mDepthChannel = new QCamera3DepthChannel(
2598 mCameraHandle->camera_handle, mChannelHandle,
2599 mCameraHandle->ops, NULL, NULL, &padding_info,
2600 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2601 mMetadataChannel);
2602 if (NULL == mDepthChannel) {
2603 LOGE("Allocation of depth channel failed");
2604 pthread_mutex_unlock(&mMutex);
2605 return NO_MEMORY;
2606 }
2607 newStream->priv = mDepthChannel;
2608 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2609 } else {
2610 // Max live snapshot inflight buffer is 1. This is to mitigate
2611 // frame drop issues for video snapshot. The more buffers being
2612 // allocated, the more frame drops there are.
2613 mPictureChannel = new QCamera3PicChannel(
2614 mCameraHandle->camera_handle, mChannelHandle,
2615 mCameraHandle->ops, captureResultCb,
2616 setBufferErrorStatus, &padding_info, this, newStream,
2617 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2618 m_bIs4KVideo, isZsl, mMetadataChannel,
2619 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2620 if (mPictureChannel == NULL) {
2621 LOGE("allocation of channel failed");
2622 pthread_mutex_unlock(&mMutex);
2623 return -ENOMEM;
2624 }
2625 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2626 newStream->max_buffers = mPictureChannel->getNumBuffers();
2627 mPictureChannel->overrideYuvSize(
2628 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2629 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 break;
2632
2633 default:
2634 LOGE("not a supported format 0x%x", newStream->format);
2635 break;
2636 }
2637 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2638 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2639 } else {
2640 LOGE("Error, Unknown stream type");
2641 pthread_mutex_unlock(&mMutex);
2642 return -EINVAL;
2643 }
2644
2645 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002646 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2647 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002648 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002649 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2651 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2652 }
2653 }
2654
2655 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2656 it != mStreamInfo.end(); it++) {
2657 if ((*it)->stream == newStream) {
2658 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2659 break;
2660 }
2661 }
2662 } else {
2663 // Channel already exists for this stream
2664 // Do nothing for now
2665 }
2666 padding_info = gCamCapability[mCameraId]->padding_info;
2667
Emilian Peev7650c122017-01-19 08:24:33 -08002668 /* Do not add entries for the input & depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002669 * since there is no real stream associated with it
2670 */
Emilian Peev7650c122017-01-19 08:24:33 -08002671 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002672 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2673 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002675 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 }
2677
Binhao Lincdb362a2017-04-20 13:31:54 -07002678 // By default, preview stream TNR is disabled.
2679 // Enable TNR for the preview stream if all of the conditions below are satisfied:
2680 // 1. video resolution <= 1080p.
2681 // 2. preview resolution == video resolution.
2682 // 3. video stream TNR is enabled.
2683 // 4. EIS2.0 is in use.
2684 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2685 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2686 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2687 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2688 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2689 video_stream->width == preview_stream->width &&
2690 video_stream->height == preview_stream->height) {
2691 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2692 CAM_QCOM_FEATURE_CPP_TNR;
2693 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2694 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2695 ~CAM_QCOM_FEATURE_CDS;
2696 }
2697 }
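    // Illustrative sketch of the mask update above (not part of the HAL flow, and
    // assuming the stream's feature mask currently carries CDS):
    //     cam_feature_mask_t mask = previewMask;      // hypothetical current mask
    //     mask |= CAM_QCOM_FEATURE_CPP_TNR;           // turn TNR on for the preview stream
    //     mask &= ~CAM_QCOM_FEATURE_CDS;              // TNR and CDS are mutually exclusive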
2698
Thierry Strudel2896d122017-02-23 19:18:03 -08002699 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2700 onlyRaw = false;
2701 }
2702
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002703 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002704 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002705 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002706 cam_analysis_info_t analysisInfo;
2707 int32_t ret = NO_ERROR;
2708 ret = mCommon.getAnalysisInfo(
2709 FALSE,
2710 analysisFeatureMask,
2711 &analysisInfo);
2712 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002713 cam_color_filter_arrangement_t analysis_color_arrangement =
2714 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2715 CAM_FILTER_ARRANGEMENT_Y :
2716 gCamCapability[mCameraId]->color_arrangement);
2717 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2718 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002719 cam_dimension_t analysisDim;
2720 analysisDim = mCommon.getMatchingDimension(previewSize,
2721 analysisInfo.analysis_recommended_res);
2722
2723 mAnalysisChannel = new QCamera3SupportChannel(
2724 mCameraHandle->camera_handle,
2725 mChannelHandle,
2726 mCameraHandle->ops,
2727 &analysisInfo.analysis_padding_info,
2728 analysisFeatureMask,
2729 CAM_STREAM_TYPE_ANALYSIS,
2730 &analysisDim,
2731 (analysisInfo.analysis_format
2732 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2733 : CAM_FORMAT_YUV_420_NV21),
2734 analysisInfo.hw_analysis_supported,
2735 gCamCapability[mCameraId]->color_arrangement,
2736 this,
2737 0); // force buffer count to 0
2738 } else {
2739 LOGW("getAnalysisInfo failed, ret = %d", ret);
2740 }
2741 if (!mAnalysisChannel) {
2742 LOGW("Analysis channel cannot be created");
2743 }
2744 }
2745
Thierry Strudel3d639192016-09-09 11:52:26 -07002746 //RAW DUMP channel
2747 if (mEnableRawDump && isRawStreamRequested == false){
2748 cam_dimension_t rawDumpSize;
2749 rawDumpSize = getMaxRawSize(mCameraId);
2750 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2751 setPAAFSupport(rawDumpFeatureMask,
2752 CAM_STREAM_TYPE_RAW,
2753 gCamCapability[mCameraId]->color_arrangement);
2754 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2755 mChannelHandle,
2756 mCameraHandle->ops,
2757 rawDumpSize,
2758 &padding_info,
2759 this, rawDumpFeatureMask);
2760 if (!mRawDumpChannel) {
2761 LOGE("Raw Dump channel cannot be created");
2762 pthread_mutex_unlock(&mMutex);
2763 return -ENOMEM;
2764 }
2765 }
2766
Thierry Strudel3d639192016-09-09 11:52:26 -07002767 if (mAnalysisChannel) {
2768 cam_analysis_info_t analysisInfo;
2769 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2770 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2771 CAM_STREAM_TYPE_ANALYSIS;
2772 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2773 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002774 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002775 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2776 &analysisInfo);
2777 if (rc != NO_ERROR) {
2778 LOGE("getAnalysisInfo failed, ret = %d", rc);
2779 pthread_mutex_unlock(&mMutex);
2780 return rc;
2781 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002782 cam_color_filter_arrangement_t analysis_color_arrangement =
2783 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2784 CAM_FILTER_ARRANGEMENT_Y :
2785 gCamCapability[mCameraId]->color_arrangement);
2786 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2787 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2788 analysis_color_arrangement);
2789
Thierry Strudel3d639192016-09-09 11:52:26 -07002790 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002791 mCommon.getMatchingDimension(previewSize,
2792 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002793 mStreamConfigInfo.num_streams++;
2794 }
2795
Thierry Strudel2896d122017-02-23 19:18:03 -08002796 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002797 cam_analysis_info_t supportInfo;
2798 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2799 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2800 setPAAFSupport(callbackFeatureMask,
2801 CAM_STREAM_TYPE_CALLBACK,
2802 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002803 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002804 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002805 if (ret != NO_ERROR) {
2806 /* Ignore the error for Mono camera
2807 * because the PAAF bit mask is only set
2808 * for CAM_STREAM_TYPE_ANALYSIS stream type
2809 */
2810 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2811 LOGW("getAnalysisInfo failed, ret = %d", ret);
2812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002813 }
2814 mSupportChannel = new QCamera3SupportChannel(
2815 mCameraHandle->camera_handle,
2816 mChannelHandle,
2817 mCameraHandle->ops,
2818 &gCamCapability[mCameraId]->padding_info,
2819 callbackFeatureMask,
2820 CAM_STREAM_TYPE_CALLBACK,
2821 &QCamera3SupportChannel::kDim,
2822 CAM_FORMAT_YUV_420_NV21,
2823 supportInfo.hw_analysis_supported,
2824 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002825 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002826 if (!mSupportChannel) {
2827 LOGE("dummy channel cannot be created");
2828 pthread_mutex_unlock(&mMutex);
2829 return -ENOMEM;
2830 }
2831 }
2832
2833 if (mSupportChannel) {
2834 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2835 QCamera3SupportChannel::kDim;
2836 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2837 CAM_STREAM_TYPE_CALLBACK;
2838 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2839 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2840 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2841 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2842 gCamCapability[mCameraId]->color_arrangement);
2843 mStreamConfigInfo.num_streams++;
2844 }
2845
2846 if (mRawDumpChannel) {
2847 cam_dimension_t rawSize;
2848 rawSize = getMaxRawSize(mCameraId);
2849 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2850 rawSize;
2851 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2852 CAM_STREAM_TYPE_RAW;
2853 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2854 CAM_QCOM_FEATURE_NONE;
2855 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2856 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2857 gCamCapability[mCameraId]->color_arrangement);
2858 mStreamConfigInfo.num_streams++;
2859 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002860
2861 if (mHdrPlusRawSrcChannel) {
2862 cam_dimension_t rawSize;
2863 rawSize = getMaxRawSize(mCameraId);
2864 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2865 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2866 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2867 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2868 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2869 gCamCapability[mCameraId]->color_arrangement);
2870 mStreamConfigInfo.num_streams++;
2871 }
2872
Thierry Strudel3d639192016-09-09 11:52:26 -07002873 /* In HFR mode, if no video stream is added, create a dummy channel so that
2874 * the ISP can still run in batch mode even for the preview-only case. This channel
2875 * is never 'start'ed (no stream-on); it is only 'initialized'. */
2876 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2877 !m_bIsVideo) {
2878 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2879 setPAAFSupport(dummyFeatureMask,
2880 CAM_STREAM_TYPE_VIDEO,
2881 gCamCapability[mCameraId]->color_arrangement);
2882 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2883 mChannelHandle,
2884 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002885 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002886 this,
2887 &mDummyBatchStream,
2888 CAM_STREAM_TYPE_VIDEO,
2889 dummyFeatureMask,
2890 mMetadataChannel);
2891 if (NULL == mDummyBatchChannel) {
2892 LOGE("creation of mDummyBatchChannel failed."
2893 "Preview will use non-hfr sensor mode ");
2894 }
2895 }
2896 if (mDummyBatchChannel) {
2897 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2898 mDummyBatchStream.width;
2899 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2900 mDummyBatchStream.height;
2901 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2902 CAM_STREAM_TYPE_VIDEO;
2903 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2904 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2905 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2907 gCamCapability[mCameraId]->color_arrangement);
2908 mStreamConfigInfo.num_streams++;
2909 }
2910
2911 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2912 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002913 m_bIs4KVideo ? 0 :
2914 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002915
2916 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2917 for (pendingRequestIterator i = mPendingRequestsList.begin();
2918 i != mPendingRequestsList.end();) {
2919 i = erasePendingRequest(i);
2920 }
2921 mPendingFrameDropList.clear();
2922 // Initialize/Reset the pending buffers list
2923 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2924 req.mPendingBufferList.clear();
2925 }
2926 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2927
Thierry Strudel3d639192016-09-09 11:52:26 -07002928 mCurJpegMeta.clear();
2929 //Get min frame duration for this streams configuration
2930 deriveMinFrameDuration();
2931
Chien-Yu Chenee335912017-02-09 17:53:20 -08002932 mFirstPreviewIntentSeen = false;
2933
2934 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002935 {
2936 Mutex::Autolock l(gHdrPlusClientLock);
2937 disableHdrPlusModeLocked();
2938 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002939
Thierry Strudel3d639192016-09-09 11:52:26 -07002940 // Update state
2941 mState = CONFIGURED;
2942
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002943 mFirstMetadataCallback = true;
2944
Thierry Strudel3d639192016-09-09 11:52:26 -07002945 pthread_mutex_unlock(&mMutex);
2946
2947 return rc;
2948}
2949
2950/*===========================================================================
2951 * FUNCTION : validateCaptureRequest
2952 *
2953 * DESCRIPTION: validate a capture request from camera service
2954 *
2955 * PARAMETERS :
2956 * @request : request from framework to process
2957 *
2958 * RETURN :
2959 *
2960 *==========================================================================*/
2961int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002962 camera3_capture_request_t *request,
2963 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002964{
2965 ssize_t idx = 0;
2966 const camera3_stream_buffer_t *b;
2967 CameraMetadata meta;
2968
2969 /* Sanity check the request */
2970 if (request == NULL) {
2971 LOGE("NULL capture request");
2972 return BAD_VALUE;
2973 }
2974
2975 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2976 /*settings cannot be null for the first request*/
2977 return BAD_VALUE;
2978 }
2979
2980 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002981 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2982 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002983 LOGE("Request %d: No output buffers provided!",
2984 __FUNCTION__, frameNumber);
2985 return BAD_VALUE;
2986 }
2987 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2988 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2989 request->num_output_buffers, MAX_NUM_STREAMS);
2990 return BAD_VALUE;
2991 }
2992 if (request->input_buffer != NULL) {
2993 b = request->input_buffer;
2994 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2995 LOGE("Request %d: Buffer %ld: Status not OK!",
2996 frameNumber, (long)idx);
2997 return BAD_VALUE;
2998 }
2999 if (b->release_fence != -1) {
3000 LOGE("Request %d: Buffer %ld: Has a release fence!",
3001 frameNumber, (long)idx);
3002 return BAD_VALUE;
3003 }
3004 if (b->buffer == NULL) {
3005 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3006 frameNumber, (long)idx);
3007 return BAD_VALUE;
3008 }
3009 }
3010
3011 // Validate all buffers
3012 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003013 if (b == NULL) {
3014 return BAD_VALUE;
3015 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003016 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003017 QCamera3ProcessingChannel *channel =
3018 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3019 if (channel == NULL) {
3020 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3021 frameNumber, (long)idx);
3022 return BAD_VALUE;
3023 }
3024 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3025 LOGE("Request %d: Buffer %ld: Status not OK!",
3026 frameNumber, (long)idx);
3027 return BAD_VALUE;
3028 }
3029 if (b->release_fence != -1) {
3030 LOGE("Request %d: Buffer %ld: Has a release fence!",
3031 frameNumber, (long)idx);
3032 return BAD_VALUE;
3033 }
3034 if (b->buffer == NULL) {
3035 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3036 frameNumber, (long)idx);
3037 return BAD_VALUE;
3038 }
3039 if (*(b->buffer) == NULL) {
3040 LOGE("Request %d: Buffer %ld: NULL private handle!",
3041 frameNumber, (long)idx);
3042 return BAD_VALUE;
3043 }
3044 idx++;
3045 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003046 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003047 return NO_ERROR;
3048}
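/* For reference, a minimal request that passes the checks above could be built as
 * follows (hypothetical stream/handle/settings names; field layout per camera3.h):
 *
 *     camera3_stream_buffer_t out = {};
 *     out.stream = configuredStream;         // stream whose priv points to a channel
 *     out.buffer = bufferHandle;             // non-NULL handle to a non-NULL buffer
 *     out.status = CAMERA3_BUFFER_STATUS_OK;
 *     out.acquire_fence = -1;
 *     out.release_fence = -1;                // release fences are rejected
 *
 *     camera3_capture_request_t req = {};
 *     req.frame_number = 0;
 *     req.settings = firstRequestSettings;   // must be non-NULL while mState == CONFIGURED
 *     req.input_buffer = NULL;
 *     req.num_output_buffers = 1;            // must be < MAX_NUM_STREAMS
 *     req.output_buffers = &out;
 */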
3049
3050/*===========================================================================
3051 * FUNCTION : deriveMinFrameDuration
3052 *
3053 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3054 * on currently configured streams.
3055 *
3056 * PARAMETERS : NONE
3057 *
3058 * RETURN : NONE
3059 *
3060 *==========================================================================*/
3061void QCamera3HardwareInterface::deriveMinFrameDuration()
3062{
3063 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3064
3065 maxJpegDim = 0;
3066 maxProcessedDim = 0;
3067 maxRawDim = 0;
3068
3069 // Figure out maximum jpeg, processed, and raw dimensions
3070 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3071 it != mStreamInfo.end(); it++) {
3072
3073 // Input stream doesn't have valid stream_type
3074 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3075 continue;
3076
3077 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3078 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3079 if (dimension > maxJpegDim)
3080 maxJpegDim = dimension;
3081 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3082 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3083 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3084 if (dimension > maxRawDim)
3085 maxRawDim = dimension;
3086 } else {
3087 if (dimension > maxProcessedDim)
3088 maxProcessedDim = dimension;
3089 }
3090 }
3091
3092 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3093 MAX_SIZES_CNT);
3094
3095 //Assume all jpeg dimensions are in processed dimensions.
3096 if (maxJpegDim > maxProcessedDim)
3097 maxProcessedDim = maxJpegDim;
3098 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3099 if (maxProcessedDim > maxRawDim) {
3100 maxRawDim = INT32_MAX;
3101
3102 for (size_t i = 0; i < count; i++) {
3103 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3104 gCamCapability[mCameraId]->raw_dim[i].height;
3105 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3106 maxRawDim = dimension;
3107 }
3108 }
3109
3110 //Find minimum durations for processed, jpeg, and raw
3111 for (size_t i = 0; i < count; i++) {
3112 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3113 gCamCapability[mCameraId]->raw_dim[i].height) {
3114 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3115 break;
3116 }
3117 }
3118 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3119 for (size_t i = 0; i < count; i++) {
3120 if (maxProcessedDim ==
3121 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3122 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3123 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3124 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3125 break;
3126 }
3127 }
3128}
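/* Worked example (hypothetical stream sizes, for illustration only): with a
 * 1920x1080 preview plus a 4032x3024 BLOB stream and no RAW stream configured,
 * maxProcessedDim becomes 4032*3024 (the JPEG dimension is folded into processed),
 * maxRawDim is then raised to the smallest sensor RAW size with at least that many
 * pixels (say 4208x3120), and the matching raw_min_duration / picture_min_duration
 * table entries populate mMinRawFrameDuration, mMinProcessedFrameDuration and
 * mMinJpegFrameDuration.
 */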
3129
3130/*===========================================================================
3131 * FUNCTION : getMinFrameDuration
3132 *
3133 * DESCRIPTION: get the minimum frame duration based on the stream minimum frame
3134 * durations derived for the current configuration and the current request's streams.
3135 *
3136 * PARAMETERS : @request: request sent by the framework
3137 *
3138 * RETURN : min frame duration for a particular request
3139 *
3140 *==========================================================================*/
3141int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3142{
3143 bool hasJpegStream = false;
3144 bool hasRawStream = false;
3145 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3146 const camera3_stream_t *stream = request->output_buffers[i].stream;
3147 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3148 hasJpegStream = true;
3149 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3150 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3151 stream->format == HAL_PIXEL_FORMAT_RAW16)
3152 hasRawStream = true;
3153 }
3154
3155 if (!hasJpegStream)
3156 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3157 else
3158 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3159}
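/* Example (hypothetical durations): with mMinProcessedFrameDuration = 33333333 ns,
 * mMinRawFrameDuration = 50000000 ns and mMinJpegFrameDuration = 100000000 ns, a
 * request without a BLOB buffer is limited to 50 ms per frame, while a request that
 * also targets a BLOB stream is limited to 100 ms.
 */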
3160
3161/*===========================================================================
3162 * FUNCTION : handleBuffersDuringFlushLock
3163 *
3164 * DESCRIPTION: Account for buffers returned from back-end during flush
3165 * This function is executed while mMutex is held by the caller.
3166 *
3167 * PARAMETERS :
3168 * @buffer: image buffer for the callback
3169 *
3170 * RETURN :
3171 *==========================================================================*/
3172void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3173{
3174 bool buffer_found = false;
3175 for (List<PendingBuffersInRequest>::iterator req =
3176 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3177 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3178 for (List<PendingBufferInfo>::iterator i =
3179 req->mPendingBufferList.begin();
3180 i != req->mPendingBufferList.end(); i++) {
3181 if (i->buffer == buffer->buffer) {
3182 mPendingBuffersMap.numPendingBufsAtFlush--;
3183 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3184 buffer->buffer, req->frame_number,
3185 mPendingBuffersMap.numPendingBufsAtFlush);
3186 buffer_found = true;
3187 break;
3188 }
3189 }
3190 if (buffer_found) {
3191 break;
3192 }
3193 }
3194 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3195 //signal the flush()
3196 LOGD("All buffers returned to HAL. Continue flush");
3197 pthread_cond_signal(&mBuffersCond);
3198 }
3199}
3200
Thierry Strudel3d639192016-09-09 11:52:26 -07003201/*===========================================================================
3202 * FUNCTION : handleBatchMetadata
3203 *
3204 * DESCRIPTION: Handles metadata buffer callback in batch mode
3205 *
3206 * PARAMETERS : @metadata_buf: metadata buffer
3207 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3208 * the meta buf in this method
3209 *
3210 * RETURN :
3211 *
3212 *==========================================================================*/
3213void QCamera3HardwareInterface::handleBatchMetadata(
3214 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3215{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003216 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003217
3218 if (NULL == metadata_buf) {
3219 LOGE("metadata_buf is NULL");
3220 return;
3221 }
3222 /* In batch mode, the metadata will contain the frame number and timestamp of
3223 * the last frame in the batch. E.g. a batch containing buffers from requests
3224 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3225 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3226 * multiple process_capture_results */
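    /* Worked example, continuing the request 5..8 case above: frameNumDiff is
     * computed below as last_frame_number + 1 - first_frame_number = 8 + 1 - 5 = 4,
     * so the loop replays this single metadata buffer four times, patching in
     * frame numbers 5, 6, 7 and 8 (urgent frame numbers are interpolated the same
     * way from first_urgent_frame_number). */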
3227 metadata_buffer_t *metadata =
3228 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3229 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3230 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3231 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3232 uint32_t frame_number = 0, urgent_frame_number = 0;
3233 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3234 bool invalid_metadata = false;
3235 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3236 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003237 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003238
3239 int32_t *p_frame_number_valid =
3240 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3241 uint32_t *p_frame_number =
3242 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3243 int64_t *p_capture_time =
3244 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3245 int32_t *p_urgent_frame_number_valid =
3246 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3247 uint32_t *p_urgent_frame_number =
3248 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3249
3250 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3251 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3252 (NULL == p_urgent_frame_number)) {
3253 LOGE("Invalid metadata");
3254 invalid_metadata = true;
3255 } else {
3256 frame_number_valid = *p_frame_number_valid;
3257 last_frame_number = *p_frame_number;
3258 last_frame_capture_time = *p_capture_time;
3259 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3260 last_urgent_frame_number = *p_urgent_frame_number;
3261 }
3262
3263 /* In batch mode, when no video buffers are requested, set_parms are sent
3264 * for every capture_request. The difference between consecutive urgent
3265 * frame numbers and frame numbers should be used to interpolate the
3266 * corresponding frame numbers and time stamps */
3267 pthread_mutex_lock(&mMutex);
3268 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003269 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3270 if(idx < 0) {
3271 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3272 last_urgent_frame_number);
3273 mState = ERROR;
3274 pthread_mutex_unlock(&mMutex);
3275 return;
3276 }
3277 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003278 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3279 first_urgent_frame_number;
3280
3281 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3282 urgent_frame_number_valid,
3283 first_urgent_frame_number, last_urgent_frame_number);
3284 }
3285
3286 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003287 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3288 if(idx < 0) {
3289 LOGE("Invalid frame number received: %d. Irrecoverable error",
3290 last_frame_number);
3291 mState = ERROR;
3292 pthread_mutex_unlock(&mMutex);
3293 return;
3294 }
3295 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 frameNumDiff = last_frame_number + 1 -
3297 first_frame_number;
3298 mPendingBatchMap.removeItem(last_frame_number);
3299
3300 LOGD("frm: valid: %d frm_num: %d - %d",
3301 frame_number_valid,
3302 first_frame_number, last_frame_number);
3303
3304 }
3305 pthread_mutex_unlock(&mMutex);
3306
3307 if (urgent_frame_number_valid || frame_number_valid) {
3308 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3309 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3310 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3311 urgentFrameNumDiff, last_urgent_frame_number);
3312 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3313 LOGE("frameNumDiff: %d frameNum: %d",
3314 frameNumDiff, last_frame_number);
3315 }
3316
3317 for (size_t i = 0; i < loopCount; i++) {
3318 /* handleMetadataWithLock is called even for invalid_metadata for
3319 * pipeline depth calculation */
3320 if (!invalid_metadata) {
3321 /* Infer frame number. Batch metadata contains frame number of the
3322 * last frame */
3323 if (urgent_frame_number_valid) {
3324 if (i < urgentFrameNumDiff) {
3325 urgent_frame_number =
3326 first_urgent_frame_number + i;
3327 LOGD("inferred urgent frame_number: %d",
3328 urgent_frame_number);
3329 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3330 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3331 } else {
3332 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3333 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3334 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3335 }
3336 }
3337
3338 /* Infer frame number. Batch metadata contains frame number of the
3339 * last frame */
3340 if (frame_number_valid) {
3341 if (i < frameNumDiff) {
3342 frame_number = first_frame_number + i;
3343 LOGD("inferred frame_number: %d", frame_number);
3344 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3345 CAM_INTF_META_FRAME_NUMBER, frame_number);
3346 } else {
3347 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3348 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3349 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3350 }
3351 }
3352
3353 if (last_frame_capture_time) {
3354 //Infer timestamp
3355 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003356 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003357 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003358 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003359 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3360 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3361 LOGD("batch capture_time: %lld, capture_time: %lld",
3362 last_frame_capture_time, capture_time);
3363 }
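            // Numeric sketch of the interpolation above (hypothetical 120 fps batch
            // of 4): with last_frame_capture_time = T and mHFRVideoFps = 120,
            // first_frame_capture_time = T - 3 * (NSEC_PER_SEC / 120), and the i-th
            // replayed metadata gets capture_time = first + i * (NSEC_PER_SEC / 120),
            // i.e. timestamps spaced ~8.33 ms apart, ending at T for the last frame.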
3364 }
3365 pthread_mutex_lock(&mMutex);
3366 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003367 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003368 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3369 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003370 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003371 pthread_mutex_unlock(&mMutex);
3372 }
3373
3374 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003376 mMetadataChannel->bufDone(metadata_buf);
3377 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003378 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003379 }
3380}
3381
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003382void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3383 camera3_error_msg_code_t errorCode)
3384{
3385 camera3_notify_msg_t notify_msg;
3386 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3387 notify_msg.type = CAMERA3_MSG_ERROR;
3388 notify_msg.message.error.error_code = errorCode;
3389 notify_msg.message.error.error_stream = NULL;
3390 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003391 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003392
3393 return;
3394}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003395
3396/*===========================================================================
3397 * FUNCTION : sendPartialMetadataWithLock
3398 *
3399 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3400 *
3401 * PARAMETERS : @metadata: metadata buffer
3402 * @requestIter: The iterator for the pending capture request for
3403 * which the partial result is being sen
3404 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3405 * last urgent metadata in a batch. Always true for non-batch mode
3406 *
3407 * RETURN :
3408 *
3409 *==========================================================================*/
3410
3411void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3412 metadata_buffer_t *metadata,
3413 const pendingRequestIterator requestIter,
3414 bool lastUrgentMetadataInBatch)
3415{
3416 camera3_capture_result_t result;
3417 memset(&result, 0, sizeof(camera3_capture_result_t));
3418
3419 requestIter->partial_result_cnt++;
3420
3421 // Extract 3A metadata
3422 result.result = translateCbUrgentMetadataToResultMetadata(
3423 metadata, lastUrgentMetadataInBatch);
3424 // Populate metadata result
3425 result.frame_number = requestIter->frame_number;
3426 result.num_output_buffers = 0;
3427 result.output_buffers = NULL;
3428 result.partial_result = requestIter->partial_result_cnt;
3429
3430 {
3431 Mutex::Autolock l(gHdrPlusClientLock);
3432 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3433 // Notify HDR+ client about the partial metadata.
3434 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3435 result.partial_result == PARTIAL_RESULT_COUNT);
3436 }
3437 }
3438
3439 orchestrateResult(&result);
3440 LOGD("urgent frame_number = %u", result.frame_number);
3441 free_camera_metadata((camera_metadata_t *)result.result);
3442}
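/* Illustrative callback sequence for one request (assuming the usual two-stage
 * partial result delivery): the framework first receives this urgent, 3A-only
 * result with a low partial_result count, and later the complete metadata whose
 * partial_result reaches PARTIAL_RESULT_COUNT once handleMetadataWithLock()
 * delivers the final result.
 */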
3443
Thierry Strudel3d639192016-09-09 11:52:26 -07003444/*===========================================================================
3445 * FUNCTION : handleMetadataWithLock
3446 *
3447 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3448 *
3449 * PARAMETERS : @metadata_buf: metadata buffer
3450 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3451 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003452 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3453 * last urgent metadata in a batch. Always true for non-batch mode
3454 * @lastMetadataInBatch: Boolean to indicate whether this is the
3455 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003456 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3457 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003458 *
3459 * RETURN :
3460 *
3461 *==========================================================================*/
3462void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003463 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003464 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3465 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003466{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003467 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003468 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3469 //during flush do not send metadata from this thread
3470 LOGD("not sending metadata during flush or when mState is error");
3471 if (free_and_bufdone_meta_buf) {
3472 mMetadataChannel->bufDone(metadata_buf);
3473 free(metadata_buf);
3474 }
3475 return;
3476 }
3477
3478 //not in flush
3479 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3480 int32_t frame_number_valid, urgent_frame_number_valid;
3481 uint32_t frame_number, urgent_frame_number;
3482 int64_t capture_time;
3483 nsecs_t currentSysTime;
3484
3485 int32_t *p_frame_number_valid =
3486 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3487 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3488 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3489 int32_t *p_urgent_frame_number_valid =
3490 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3491 uint32_t *p_urgent_frame_number =
3492 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3493 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3494 metadata) {
3495 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3496 *p_frame_number_valid, *p_frame_number);
3497 }
3498
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003499 camera_metadata_t *resultMetadata = nullptr;
3500
Thierry Strudel3d639192016-09-09 11:52:26 -07003501 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3502 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3503 LOGE("Invalid metadata");
3504 if (free_and_bufdone_meta_buf) {
3505 mMetadataChannel->bufDone(metadata_buf);
3506 free(metadata_buf);
3507 }
3508 goto done_metadata;
3509 }
3510 frame_number_valid = *p_frame_number_valid;
3511 frame_number = *p_frame_number;
3512 capture_time = *p_capture_time;
3513 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3514 urgent_frame_number = *p_urgent_frame_number;
3515 currentSysTime = systemTime(CLOCK_MONOTONIC);
3516
3517 // Detect if buffers from any requests are overdue
3518 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003519 int64_t timeout;
3520 {
3521 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3522 // If there is a pending HDR+ request, the following requests may be blocked until the
3523 // HDR+ request is done. So allow a longer timeout.
3524 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3525 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3526 }
3527
3528 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003529 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003530 assert(missed.stream->priv);
3531 if (missed.stream->priv) {
3532 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3533 assert(ch->mStreams[0]);
3534 if (ch->mStreams[0]) {
3535 LOGE("Cancel missing frame = %d, buffer = %p,"
3536 "stream type = %d, stream format = %d",
3537 req.frame_number, missed.buffer,
3538 ch->mStreams[0]->getMyType(), missed.stream->format);
3539 ch->timeoutFrame(req.frame_number);
3540 }
3541 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003542 }
3543 }
3544 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003545 //For the very first metadata callback, regardless of whether it contains a valid
3546 //frame number, send the partial metadata for the jumpstarting requests.
3547 //Note that this has to be done even if the metadata doesn't contain a valid
3548 //urgent frame number, because when only one request is ever submitted
3549 //to the HAL, there won't be a subsequent valid urgent frame number.
3550 if (mFirstMetadataCallback) {
3551 for (pendingRequestIterator i =
3552 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3553 if (i->bUseFirstPartial) {
3554 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3555 }
3556 }
3557 mFirstMetadataCallback = false;
3558 }
3559
Thierry Strudel3d639192016-09-09 11:52:26 -07003560 //Partial result on process_capture_result for timestamp
3561 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003562 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003563
3564 //Received an urgent frame number, handle it
3565 //using partial results
3566 for (pendingRequestIterator i =
3567 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3568 LOGD("Iterator Frame = %d urgent frame = %d",
3569 i->frame_number, urgent_frame_number);
3570
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003571 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003572 (i->partial_result_cnt == 0)) {
3573 LOGE("Error: HAL missed urgent metadata for frame number %d",
3574 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003575 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003576 }
3577
3578 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003579 i->partial_result_cnt == 0) {
3580 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003581 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3582 // Instant AEC settled for this frame.
3583 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3584 mInstantAECSettledFrameNumber = urgent_frame_number;
3585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 break;
3587 }
3588 }
3589 }
3590
3591 if (!frame_number_valid) {
3592 LOGD("Not a valid normal frame number, used as SOF only");
3593 if (free_and_bufdone_meta_buf) {
3594 mMetadataChannel->bufDone(metadata_buf);
3595 free(metadata_buf);
3596 }
3597 goto done_metadata;
3598 }
3599 LOGH("valid frame_number = %u, capture_time = %lld",
3600 frame_number, capture_time);
3601
Emilian Peev7650c122017-01-19 08:24:33 -08003602 if (metadata->is_depth_data_valid) {
3603 handleDepthDataLocked(metadata->depth_data, frame_number);
3604 }
3605
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606 // Check whether any stream buffer corresponding to this is dropped or not
3607 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3608 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3609 for (auto & pendingRequest : mPendingRequestsList) {
3610 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3611 mInstantAECSettledFrameNumber)) {
3612 camera3_notify_msg_t notify_msg = {};
3613 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003614 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003615 QCamera3ProcessingChannel *channel =
3616 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003617 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003618 if (p_cam_frame_drop) {
3619 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003620 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003621 // Got the stream ID for drop frame.
3622 dropFrame = true;
3623 break;
3624 }
3625 }
3626 } else {
3627 // This is instant AEC case.
3628 // For instant AEC drop the stream untill AEC is settled.
3629 dropFrame = true;
3630 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003631
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003632 if (dropFrame) {
3633 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3634 if (p_cam_frame_drop) {
3635 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003636 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003638 } else {
3639 // For instant AEC, inform frame drop and frame number
3640 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3641 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003642 pendingRequest.frame_number, streamID,
3643 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003644 }
3645 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003646 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003647 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003649 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003650 if (p_cam_frame_drop) {
3651 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003652 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003653 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003654 } else {
3655 // For instant AEC, inform frame drop and frame number
3656 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3657 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003658 pendingRequest.frame_number, streamID,
3659 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003660 }
3661 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003662 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003663 PendingFrameDrop.stream_ID = streamID;
3664 // Add the Frame drop info to mPendingFrameDropList
3665 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003667 }
3668 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003669 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003670
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003671 for (auto & pendingRequest : mPendingRequestsList) {
3672 // Find the pending request with the frame number.
3673 if (pendingRequest.frame_number == frame_number) {
3674 // Update the sensor timestamp.
3675 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003676
Thierry Strudel3d639192016-09-09 11:52:26 -07003677
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003678 /* Set the timestamp in display metadata so that clients aware of
3679 private_handle, such as VT, can use these unmodified timestamps.
3680 The camera framework is unaware of this timestamp and cannot change it. */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003681 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003682
Thierry Strudel3d639192016-09-09 11:52:26 -07003683 // Find channel requiring metadata, meaning internal offline postprocess
3684 // is needed.
3685 //TODO: for now, we don't support two streams requiring metadata at the same time.
3686 // (because we are not making copies, and the metadata buffer is not reference counted).
3687 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003688 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3689 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003690 if (iter->need_metadata) {
3691 internalPproc = true;
3692 QCamera3ProcessingChannel *channel =
3693 (QCamera3ProcessingChannel *)iter->stream->priv;
3694 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003695 if(p_is_metabuf_queued != NULL) {
3696 *p_is_metabuf_queued = true;
3697 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003698 break;
3699 }
3700 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003701 for (auto itr = pendingRequest.internalRequestList.begin();
3702 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003703 if (itr->need_metadata) {
3704 internalPproc = true;
3705 QCamera3ProcessingChannel *channel =
3706 (QCamera3ProcessingChannel *)itr->stream->priv;
3707 channel->queueReprocMetadata(metadata_buf);
3708 break;
3709 }
3710 }
3711
Thierry Strudel54dc9782017-02-15 12:12:10 -08003712 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003713
3714 bool *enableZsl = nullptr;
3715 if (gExposeEnableZslKey) {
3716 enableZsl = &pendingRequest.enableZsl;
3717 }
3718
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003719 resultMetadata = translateFromHalMetadata(metadata,
3720 pendingRequest.timestamp, pendingRequest.request_id,
3721 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3722 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003723 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003724 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003725 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003726 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003727 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003728 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003729
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003730 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003731
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003732 if (pendingRequest.blob_request) {
3733 //Dump tuning metadata if enabled and available
3734 char prop[PROPERTY_VALUE_MAX];
3735 memset(prop, 0, sizeof(prop));
3736 property_get("persist.camera.dumpmetadata", prop, "0");
3737 int32_t enabled = atoi(prop);
3738 if (enabled && metadata->is_tuning_params_valid) {
3739 dumpMetadataToFile(metadata->tuning_params,
3740 mMetaFrameCount,
3741 enabled,
3742 "Snapshot",
3743 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003744 }
3745 }
3746
3747 if (!internalPproc) {
3748 LOGD("couldn't find need_metadata for this metadata");
3749 // Return metadata buffer
3750 if (free_and_bufdone_meta_buf) {
3751 mMetadataChannel->bufDone(metadata_buf);
3752 free(metadata_buf);
3753 }
3754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003755
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003757 }
3758 }
3759
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003760 // Try to send out shutter callbacks and capture results.
3761 handlePendingResultsWithLock(frame_number, resultMetadata);
3762 return;
3763
Thierry Strudel3d639192016-09-09 11:52:26 -07003764done_metadata:
3765 for (pendingRequestIterator i = mPendingRequestsList.begin();
3766 i != mPendingRequestsList.end() ;i++) {
3767 i->pipeline_depth++;
3768 }
3769 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3770 unblockRequestIfNecessary();
3771}
3772
3773/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003774 * FUNCTION : handleDepthDataLocked
3775 *
3776 * DESCRIPTION: Handles incoming depth data
3777 *
3778 * PARAMETERS : @depthData : Depth data
3779 * @frameNumber: Frame number of the incoming depth data
3780 *
3781 * RETURN :
3782 *
3783 *==========================================================================*/
3784void QCamera3HardwareInterface::handleDepthDataLocked(
3785 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3786 uint32_t currentFrameNumber;
3787 buffer_handle_t *depthBuffer;
3788
3789 if (nullptr == mDepthChannel) {
3790 LOGE("Depth channel not present!");
3791 return;
3792 }
3793
3794 camera3_stream_buffer_t resultBuffer =
3795 {.acquire_fence = -1,
3796 .release_fence = -1,
3797 .status = CAMERA3_BUFFER_STATUS_OK,
3798 .buffer = nullptr,
3799 .stream = mDepthChannel->getStream()};
3800 camera3_capture_result_t result =
3801 {.result = nullptr,
3802 .num_output_buffers = 1,
3803 .output_buffers = &resultBuffer,
3804 .partial_result = 0,
3805 .frame_number = 0};
3806
3807 do {
3808 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3809 if (nullptr == depthBuffer) {
3810 break;
3811 }
3812
3813 result.frame_number = currentFrameNumber;
3814 resultBuffer.buffer = depthBuffer;
3815 if (currentFrameNumber == frameNumber) {
3816 int32_t rc = mDepthChannel->populateDepthData(depthData,
3817 frameNumber);
3818 if (NO_ERROR != rc) {
3819 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3820 } else {
3821 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3822 }
3823 } else if (currentFrameNumber > frameNumber) {
3824 break;
3825 } else {
3826 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3827 {{currentFrameNumber, mDepthChannel->getStream(),
3828 CAMERA3_MSG_ERROR_BUFFER}}};
3829 orchestrateNotify(&notify_msg);
3830
3831 LOGE("Depth buffer for frame number: %d is missing "
3832 "returning back!", currentFrameNumber);
3833 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3834 }
3835 mDepthChannel->unmapBuffer(currentFrameNumber);
3836
3837 orchestrateResult(&result);
3838 } while (currentFrameNumber < frameNumber);
3839}
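/* Example of the in-order draining above (hypothetical frame numbers): if depth
 * buffers for frames 10, 11 and 12 are pending and depth data arrives for frame 12,
 * frames 10 and 11 are returned with CAMERA3_BUFFER_STATUS_ERROR (each preceded by
 * an error notify), and frame 12 is returned populated with the new depth data.
 */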
3840
3841/*===========================================================================
3842 * FUNCTION : notifyErrorFoPendingDepthData
3843 *
3844 * DESCRIPTION: Returns error for any pending depth buffers
3845 *
3846 * PARAMETERS : depthCh - depth channel that needs to get flushed
3847 *
3848 * RETURN :
3849 *
3850 *==========================================================================*/
3851void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3852 QCamera3DepthChannel *depthCh) {
3853 uint32_t currentFrameNumber;
3854 buffer_handle_t *depthBuffer;
3855
3856 if (nullptr == depthCh) {
3857 return;
3858 }
3859
3860 camera3_notify_msg_t notify_msg =
3861 {.type = CAMERA3_MSG_ERROR,
3862 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3863 camera3_stream_buffer_t resultBuffer =
3864 {.acquire_fence = -1,
3865 .release_fence = -1,
3866 .buffer = nullptr,
3867 .stream = depthCh->getStream(),
3868 .status = CAMERA3_BUFFER_STATUS_ERROR};
3869 camera3_capture_result_t result =
3870 {.result = nullptr,
3871 .frame_number = 0,
3872 .num_output_buffers = 1,
3873 .partial_result = 0,
3874 .output_buffers = &resultBuffer};
3875
3876 while (nullptr !=
3877 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3878 depthCh->unmapBuffer(currentFrameNumber);
3879
3880 notify_msg.message.error.frame_number = currentFrameNumber;
3881 orchestrateNotify(&notify_msg);
3882
3883 resultBuffer.buffer = depthBuffer;
3884 result.frame_number = currentFrameNumber;
3885 orchestrateResult(&result);
3886 };
3887}
3888
3889/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003890 * FUNCTION : hdrPlusPerfLock
3891 *
3892 * DESCRIPTION: perf lock for HDR+ using custom intent
3893 *
3894 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3895 *
3896 * RETURN : None
3897 *
3898 *==========================================================================*/
3899void QCamera3HardwareInterface::hdrPlusPerfLock(
3900 mm_camera_super_buf_t *metadata_buf)
3901{
3902 if (NULL == metadata_buf) {
3903 LOGE("metadata_buf is NULL");
3904 return;
3905 }
3906 metadata_buffer_t *metadata =
3907 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3908 int32_t *p_frame_number_valid =
3909 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3910 uint32_t *p_frame_number =
3911 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3912
3913 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3914 LOGE("%s: Invalid metadata", __func__);
3915 return;
3916 }
3917
3918 //acquire perf lock for 5 sec after the last HDR frame is captured
3919 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3920 if ((p_frame_number != NULL) &&
3921 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003922 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003923 }
3924 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003925}
3926
3927/*===========================================================================
3928 * FUNCTION : handleInputBufferWithLock
3929 *
3930 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3931 *
3932 * PARAMETERS : @frame_number: frame number of the input buffer
3933 *
3934 * RETURN :
3935 *
3936 *==========================================================================*/
3937void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3938{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003939 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003940 pendingRequestIterator i = mPendingRequestsList.begin();
3941 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3942 i++;
3943 }
3944 if (i != mPendingRequestsList.end() && i->input_buffer) {
3945 //found the right request
3946 if (!i->shutter_notified) {
3947 CameraMetadata settings;
3948 camera3_notify_msg_t notify_msg;
3949 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3950 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3951 if(i->settings) {
3952 settings = i->settings;
3953 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3954 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3955 } else {
3956 LOGE("No timestamp in input settings! Using current one.");
3957 }
3958 } else {
3959 LOGE("Input settings missing!");
3960 }
3961
3962 notify_msg.type = CAMERA3_MSG_SHUTTER;
3963 notify_msg.message.shutter.frame_number = frame_number;
3964 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003965 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003966 i->shutter_notified = true;
3967 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3968 i->frame_number, notify_msg.message.shutter.timestamp);
3969 }
3970
3971 if (i->input_buffer->release_fence != -1) {
3972 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3973 close(i->input_buffer->release_fence);
3974 if (rc != OK) {
3975 LOGE("input buffer sync wait failed %d", rc);
3976 }
3977 }
3978
3979 camera3_capture_result result;
3980 memset(&result, 0, sizeof(camera3_capture_result));
3981 result.frame_number = frame_number;
3982 result.result = i->settings;
3983 result.input_buffer = i->input_buffer;
3984 result.partial_result = PARTIAL_RESULT_COUNT;
3985
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003986 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003987 LOGD("Input request metadata and input buffer frame_number = %u",
3988 i->frame_number);
3989 i = erasePendingRequest(i);
3990 } else {
3991 LOGE("Could not find input request for frame number %d", frame_number);
3992 }
3993}
3994
3995/*===========================================================================
3996 * FUNCTION : handleBufferWithLock
3997 *
3998 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3999 *
4000 * PARAMETERS : @buffer: image buffer for the callback
4001 * @frame_number: frame number of the image buffer
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::handleBufferWithLock(
4007 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4008{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004009 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004010
4011 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4012 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4013 }
4014
Thierry Strudel3d639192016-09-09 11:52:26 -07004015 /* Nothing to be done during error state */
4016 if ((ERROR == mState) || (DEINIT == mState)) {
4017 return;
4018 }
4019 if (mFlushPerf) {
4020 handleBuffersDuringFlushLock(buffer);
4021 return;
4022 }
4023 //not in flush
4024 // If the frame number doesn't exist in the pending request list,
4025 // directly send the buffer to the frameworks, and update pending buffers map
4026 // Otherwise, book-keep the buffer.
4027 pendingRequestIterator i = mPendingRequestsList.begin();
4028 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4029 i++;
4030 }
4031 if (i == mPendingRequestsList.end()) {
4032 // Verify all pending requests frame_numbers are greater
4033 for (pendingRequestIterator j = mPendingRequestsList.begin();
4034 j != mPendingRequestsList.end(); j++) {
4035 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4036 LOGW("Error: pending live frame number %d is smaller than %d",
4037 j->frame_number, frame_number);
4038 }
4039 }
4040 camera3_capture_result_t result;
4041 memset(&result, 0, sizeof(camera3_capture_result_t));
4042 result.result = NULL;
4043 result.frame_number = frame_number;
4044 result.num_output_buffers = 1;
4045 result.partial_result = 0;
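        // If this buffer's stream and frame are flagged in the pending frame-drop
        // list, mark the buffer as errored and remove the entry so it is only
        // reported once.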
4046 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4047 m != mPendingFrameDropList.end(); m++) {
4048 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4049 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4050 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4051 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4052 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4053 frame_number, streamID);
4054 m = mPendingFrameDropList.erase(m);
4055 break;
4056 }
4057 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004058 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 result.output_buffers = buffer;
4060 LOGH("result frame_number = %d, buffer = %p",
4061 frame_number, buffer->buffer);
4062
4063 mPendingBuffersMap.removeBuf(buffer->buffer);
4064
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004065 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 } else {
4067 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004068 if (i->input_buffer->release_fence != -1) {
4069 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4070 close(i->input_buffer->release_fence);
4071 if (rc != OK) {
4072 LOGE("input buffer sync wait failed %d", rc);
4073 }
4074 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004075 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004076
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004077 // Put buffer into the pending request
4078 for (auto &requestedBuffer : i->buffers) {
4079 if (requestedBuffer.stream == buffer->stream) {
4080 if (requestedBuffer.buffer != nullptr) {
4081 LOGE("Error: buffer is already set");
4082 } else {
4083 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4084 sizeof(camera3_stream_buffer_t));
4085 *(requestedBuffer.buffer) = *buffer;
4086 LOGH("cache buffer %p at result frame_number %u",
4087 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 }
4089 }
4090 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004091
4092 if (i->input_buffer) {
4093 // For a reprocessing request, try to send out shutter callback and result metadata.
4094 handlePendingResultsWithLock(frame_number, nullptr);
4095 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004096 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004097
4098 if (mPreviewStarted == false) {
4099 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4100 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004101 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4102
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004103 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4104 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4105 mPreviewStarted = true;
4106
4107 // Set power hint for preview
4108 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4109 }
4110 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004111}
4112
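/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Attaches result metadata to the matching pending request and
 *              sends out, in frame-number order, every shutter callback and
 *              capture result that is now ready. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber    : frame number of the result
 *              @resultMetadata : result metadata for the frame (may be null
 *                                for reprocessing requests)
 *
 * RETURN     : None
 *
 *==========================================================================*/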
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004113void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4114 const camera_metadata_t *resultMetadata)
4115{
4116 // Find the pending request for this result metadata.
4117 auto requestIter = mPendingRequestsList.begin();
4118 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4119 requestIter++;
4120 }
4121
4122 if (requestIter == mPendingRequestsList.end()) {
4123 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4124 return;
4125 }
4126
4127 // Update the result metadata
4128 requestIter->resultMetadata = resultMetadata;
4129
4130 // Check what type of request this is.
4131 bool liveRequest = false;
4132 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004133 // HDR+ request doesn't have partial results.
4134 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004135 } else if (requestIter->input_buffer != nullptr) {
4136 // Reprocessing request result is the same as settings.
4137 requestIter->resultMetadata = requestIter->settings;
4138 // Reprocessing request doesn't have partial results.
4139 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4140 } else {
4141 liveRequest = true;
4142 requestIter->partial_result_cnt++;
4143 mPendingLiveRequest--;
4144
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004145 {
4146 Mutex::Autolock l(gHdrPlusClientLock);
4147 // For a live request, send the metadata to HDR+ client.
4148 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4149 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4150 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4151 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004152 }
4153 }
4154
4155 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4156 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4157 bool readyToSend = true;
4158
4159 // Iterate through the pending requests to send out shutter callbacks and results that are
4160 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4161 // live requests that don't have result metadata yet.
4162 auto iter = mPendingRequestsList.begin();
4163 while (iter != mPendingRequestsList.end()) {
4164 // Check if current pending request is ready. If it's not ready, the following pending
4165 // requests are also not ready.
4166 if (readyToSend && iter->resultMetadata == nullptr) {
4167 readyToSend = false;
4168 }
4169
4170 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4171
4172 std::vector<camera3_stream_buffer_t> outputBuffers;
4173
4174 camera3_capture_result_t result = {};
4175 result.frame_number = iter->frame_number;
4176 result.result = iter->resultMetadata;
4177 result.partial_result = iter->partial_result_cnt;
4178
4179 // If this pending buffer has result metadata, we may be able to send out shutter callback
4180 // and result metadata.
4181 if (iter->resultMetadata != nullptr) {
4182 if (!readyToSend) {
                // If any of the previous pending requests is not ready, this pending request is
4184 // also not ready to send in order to keep shutter callbacks and result metadata
4185 // in order.
4186 iter++;
4187 continue;
4188 }
4189
4190 // Invoke shutter callback if not yet.
4191 if (!iter->shutter_notified) {
4192 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4193
4194 // Find the timestamp in HDR+ result metadata
4195 camera_metadata_ro_entry_t entry;
4196 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4197 ANDROID_SENSOR_TIMESTAMP, &entry);
4198 if (res != OK) {
4199 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4200 __FUNCTION__, iter->frame_number, strerror(-res), res);
4201 } else {
4202 timestamp = entry.data.i64[0];
4203 }
4204
4205 camera3_notify_msg_t notify_msg = {};
4206 notify_msg.type = CAMERA3_MSG_SHUTTER;
4207 notify_msg.message.shutter.frame_number = iter->frame_number;
4208 notify_msg.message.shutter.timestamp = timestamp;
4209 orchestrateNotify(&notify_msg);
4210 iter->shutter_notified = true;
4211 }
4212
4213 result.input_buffer = iter->input_buffer;
4214
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004215 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4216 // If the result metadata belongs to a live request, notify errors for previous pending
4217 // live requests.
4218 mPendingLiveRequest--;
4219
4220 CameraMetadata dummyMetadata;
4221 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4222 result.result = dummyMetadata.release();
4223
4224 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004225
            // partial_result should be PARTIAL_RESULT_COUNT in case of
4227 // ERROR_RESULT.
4228 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4229 result.partial_result = PARTIAL_RESULT_COUNT;
4230
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004231 } else {
4232 iter++;
4233 continue;
4234 }
4235
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004236 // Prepare output buffer array
4237 for (auto bufferInfoIter = iter->buffers.begin();
4238 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4239 if (bufferInfoIter->buffer != nullptr) {
4240
4241 QCamera3Channel *channel =
4242 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4243 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4244
4245 // Check if this buffer is a dropped frame.
4246 auto frameDropIter = mPendingFrameDropList.begin();
4247 while (frameDropIter != mPendingFrameDropList.end()) {
4248 if((frameDropIter->stream_ID == streamID) &&
4249 (frameDropIter->frame_number == frameNumber)) {
4250 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4251 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4252 streamID);
4253 mPendingFrameDropList.erase(frameDropIter);
4254 break;
4255 } else {
4256 frameDropIter++;
4257 }
4258 }
4259
4260 // Check buffer error status
4261 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4262 bufferInfoIter->buffer->buffer);
4263 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4264
4265 outputBuffers.push_back(*(bufferInfoIter->buffer));
4266 free(bufferInfoIter->buffer);
4267 bufferInfoIter->buffer = NULL;
4268 }
4269 }
4270
4271 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4272 result.num_output_buffers = outputBuffers.size();
4273
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004274 orchestrateResult(&result);
4275
4276 // For reprocessing, result metadata is the same as settings so do not free it here to
4277 // avoid double free.
4278 if (result.result != iter->settings) {
4279 free_camera_metadata((camera_metadata_t *)result.result);
4280 }
4281 iter->resultMetadata = nullptr;
4282 iter = erasePendingRequest(iter);
4283 }
4284
4285 if (liveRequest) {
4286 for (auto &iter : mPendingRequestsList) {
4287 // Increment pipeline depth for the following pending requests.
4288 if (iter.frame_number > frameNumber) {
4289 iter.pipeline_depth++;
4290 }
4291 }
4292 }
4293
4294 unblockRequestIfNecessary();
4295}
4296
Thierry Strudel3d639192016-09-09 11:52:26 -07004297/*===========================================================================
4298 * FUNCTION : unblockRequestIfNecessary
4299 *
4300 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4301 * that mMutex is held when this function is called.
4302 *
4303 * PARAMETERS :
4304 *
4305 * RETURN :
4306 *
4307 *==========================================================================*/
4308void QCamera3HardwareInterface::unblockRequestIfNecessary()
4309{
4310 // Unblock process_capture_request
4311 pthread_cond_signal(&mRequestCond);
4312}
4313
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004314/*===========================================================================
4315 * FUNCTION : isHdrSnapshotRequest
4316 *
4317 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4319 * PARAMETERS : camera3 request structure
4320 *
4321 * RETURN : boolean decision variable
4322 *
4323 *==========================================================================*/
4324bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4325{
4326 if (request == NULL) {
4327 LOGE("Invalid request handle");
4328 assert(0);
4329 return false;
4330 }
4331
4332 if (!mForceHdrSnapshot) {
4333 CameraMetadata frame_settings;
4334 frame_settings = request->settings;
4335
4336 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4337 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4338 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4339 return false;
4340 }
4341 } else {
4342 return false;
4343 }
4344
4345 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4346 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4347 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4348 return false;
4349 }
4350 } else {
4351 return false;
4352 }
4353 }
4354
4355 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4356 if (request->output_buffers[i].stream->format
4357 == HAL_PIXEL_FORMAT_BLOB) {
4358 return true;
4359 }
4360 }
4361
4362 return false;
4363}
4364/*===========================================================================
4365 * FUNCTION : orchestrateRequest
4366 *
4367 * DESCRIPTION: Orchestrates a capture request from camera service
4368 *
4369 * PARAMETERS :
4370 * @request : request from framework to process
4371 *
4372 * RETURN : Error status codes
4373 *
4374 *==========================================================================*/
4375int32_t QCamera3HardwareInterface::orchestrateRequest(
4376 camera3_capture_request_t *request)
4377{
4378
4379 uint32_t originalFrameNumber = request->frame_number;
4380 uint32_t originalOutputCount = request->num_output_buffers;
4381 const camera_metadata_t *original_settings = request->settings;
4382 List<InternalRequest> internallyRequestedStreams;
4383 List<InternalRequest> emptyInternalList;
4384
4385 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4386 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4387 uint32_t internalFrameNumber;
4388 CameraMetadata modified_meta;
4389
4390
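        // HDR snapshot bracketing: issue metering-only internal requests so AE can
        // settle at each exposure compensation step, capture the bracketed frames
        // (with metadata where needed), and finally restore the original settings.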
4391 /* Add Blob channel to list of internally requested streams */
4392 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4393 if (request->output_buffers[i].stream->format
4394 == HAL_PIXEL_FORMAT_BLOB) {
4395 InternalRequest streamRequested;
4396 streamRequested.meteringOnly = 1;
4397 streamRequested.need_metadata = 0;
4398 streamRequested.stream = request->output_buffers[i].stream;
4399 internallyRequestedStreams.push_back(streamRequested);
4400 }
4401 }
4402 request->num_output_buffers = 0;
4403 auto itr = internallyRequestedStreams.begin();
4404
4405 /* Modify setting to set compensation */
4406 modified_meta = request->settings;
4407 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4408 uint8_t aeLock = 1;
4409 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4410 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4411 camera_metadata_t *modified_settings = modified_meta.release();
4412 request->settings = modified_settings;
4413
4414 /* Capture Settling & -2x frame */
4415 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4416 request->frame_number = internalFrameNumber;
4417 processCaptureRequest(request, internallyRequestedStreams);
4418
4419 request->num_output_buffers = originalOutputCount;
4420 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4421 request->frame_number = internalFrameNumber;
4422 processCaptureRequest(request, emptyInternalList);
4423 request->num_output_buffers = 0;
4424
4425 modified_meta = modified_settings;
4426 expCompensation = 0;
4427 aeLock = 1;
4428 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4429 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4430 modified_settings = modified_meta.release();
4431 request->settings = modified_settings;
4432
4433 /* Capture Settling & 0X frame */
4434
4435 itr = internallyRequestedStreams.begin();
4436 if (itr == internallyRequestedStreams.end()) {
4437 LOGE("Error Internally Requested Stream list is empty");
4438 assert(0);
4439 } else {
4440 itr->need_metadata = 0;
4441 itr->meteringOnly = 1;
4442 }
4443
4444 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4445 request->frame_number = internalFrameNumber;
4446 processCaptureRequest(request, internallyRequestedStreams);
4447
4448 itr = internallyRequestedStreams.begin();
4449 if (itr == internallyRequestedStreams.end()) {
4450 ALOGE("Error Internally Requested Stream list is empty");
4451 assert(0);
4452 } else {
4453 itr->need_metadata = 1;
4454 itr->meteringOnly = 0;
4455 }
4456
4457 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4458 request->frame_number = internalFrameNumber;
4459 processCaptureRequest(request, internallyRequestedStreams);
4460
4461 /* Capture 2X frame*/
4462 modified_meta = modified_settings;
4463 expCompensation = GB_HDR_2X_STEP_EV;
4464 aeLock = 1;
4465 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4466 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4467 modified_settings = modified_meta.release();
4468 request->settings = modified_settings;
4469
4470 itr = internallyRequestedStreams.begin();
4471 if (itr == internallyRequestedStreams.end()) {
4472 ALOGE("Error Internally Requested Stream list is empty");
4473 assert(0);
4474 } else {
4475 itr->need_metadata = 0;
4476 itr->meteringOnly = 1;
4477 }
4478 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4479 request->frame_number = internalFrameNumber;
4480 processCaptureRequest(request, internallyRequestedStreams);
4481
4482 itr = internallyRequestedStreams.begin();
4483 if (itr == internallyRequestedStreams.end()) {
4484 ALOGE("Error Internally Requested Stream list is empty");
4485 assert(0);
4486 } else {
4487 itr->need_metadata = 1;
4488 itr->meteringOnly = 0;
4489 }
4490
4491 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4492 request->frame_number = internalFrameNumber;
4493 processCaptureRequest(request, internallyRequestedStreams);
4494
4495
4496 /* Capture 2X on original streaming config*/
4497 internallyRequestedStreams.clear();
4498
4499 /* Restore original settings pointer */
4500 request->settings = original_settings;
4501 } else {
4502 uint32_t internalFrameNumber;
4503 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4504 request->frame_number = internalFrameNumber;
4505 return processCaptureRequest(request, internallyRequestedStreams);
4506 }
4507
4508 return NO_ERROR;
4509}
4510
4511/*===========================================================================
4512 * FUNCTION : orchestrateResult
4513 *
4514 * DESCRIPTION: Orchestrates a capture result to camera service
4515 *
4516 * PARAMETERS :
 * @result : capture result to send to the framework
4518 *
4519 * RETURN :
4520 *
4521 *==========================================================================*/
4522void QCamera3HardwareInterface::orchestrateResult(
4523 camera3_capture_result_t *result)
4524{
4525 uint32_t frameworkFrameNumber;
4526 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4527 frameworkFrameNumber);
4528 if (rc != NO_ERROR) {
4529 LOGE("Cannot find translated frameworkFrameNumber");
4530 assert(0);
4531 } else {
4532 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
            LOGD("Internal request, dropping the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004534 } else {
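            // If the result carries ANDROID_SYNC_FRAME_NUMBER, rewrite it with the
            // framework-visible frame number before forwarding the result.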
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004535 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004536 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4537 camera_metadata_entry_t entry;
4538 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4539 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004540 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004541 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4542 if (ret != OK)
4543 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004544 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004545 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004546 result->frame_number = frameworkFrameNumber;
4547 mCallbackOps->process_capture_result(mCallbackOps, result);
4548 }
4549 }
4550}
4551
4552/*===========================================================================
4553 * FUNCTION : orchestrateNotify
4554 *
4555 * DESCRIPTION: Orchestrates a notify to camera service
4556 *
4557 * PARAMETERS :
 * @notify_msg : notify message to send to the framework
4559 *
4560 * RETURN :
4561 *
4562 *==========================================================================*/
4563void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4564{
4565 uint32_t frameworkFrameNumber;
4566 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004567 int32_t rc = NO_ERROR;
4568
4569 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004570 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004571
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004572 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004573 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4574 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4575 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004576 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004577 LOGE("Cannot find translated frameworkFrameNumber");
4578 assert(0);
4579 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580 }
4581 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004582
4583 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
        LOGD("Internal request, dropping the notify callback");
4585 } else {
4586 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4587 mCallbackOps->notify(mCallbackOps, notify_msg);
4588 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004589}
4590
4591/*===========================================================================
4592 * FUNCTION : FrameNumberRegistry
4593 *
4594 * DESCRIPTION: Constructor
4595 *
4596 * PARAMETERS :
4597 *
4598 * RETURN :
4599 *
4600 *==========================================================================*/
4601FrameNumberRegistry::FrameNumberRegistry()
4602{
4603 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4604}
4605
4606/*===========================================================================
4607 * FUNCTION : ~FrameNumberRegistry
4608 *
4609 * DESCRIPTION: Destructor
4610 *
4611 * PARAMETERS :
4612 *
4613 * RETURN :
4614 *
4615 *==========================================================================*/
4616FrameNumberRegistry::~FrameNumberRegistry()
4617{
4618}
4619
4620/*===========================================================================
 * FUNCTION   : purgeOldEntriesLocked
4622 *
 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4624 *
4625 * PARAMETERS :
4626 *
4627 * RETURN : NONE
4628 *
4629 *==========================================================================*/
4630void FrameNumberRegistry::purgeOldEntriesLocked()
4631{
4632 while (_register.begin() != _register.end()) {
4633 auto itr = _register.begin();
4634 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4635 _register.erase(itr);
4636 } else {
4637 return;
4638 }
4639 }
4640}
4641
4642/*===========================================================================
4643 * FUNCTION : allocStoreInternalFrameNumber
4644 *
4645 * DESCRIPTION: Method to note down a framework request and associate a new
4646 * internal request number against it
 *              internal request number with it
4648 * PARAMETERS :
 * @frameworkFrameNumber : Identifier given by the framework
 * @internalFrameNumber  : Output parameter which will receive the newly
 *                         generated internal frame number
4652 *
4653 * RETURN : Error code
4654 *
4655 *==========================================================================*/
4656int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4657 uint32_t &internalFrameNumber)
4658{
4659 Mutex::Autolock lock(mRegistryLock);
4660 internalFrameNumber = _nextFreeInternalNumber++;
4661 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4662 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4663 purgeOldEntriesLocked();
4664 return NO_ERROR;
4665}
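// Illustrative usage (sketch only, mirroring how the orchestration code in this
// file uses the registry): store a mapping before dispatching a request and
// translate back when the corresponding result arrives.
//
//     uint32_t internalFrameNumber;
//     _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number,
//             internalFrameNumber);
//     ...
//     uint32_t frameworkFrameNumber;
//     if (_orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
//             frameworkFrameNumber) == NO_ERROR) {
//         result->frame_number = frameworkFrameNumber;
//     }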
4666
4667/*===========================================================================
4668 * FUNCTION : generateStoreInternalFrameNumber
4669 *
4670 * DESCRIPTION: Method to associate a new internal request number independent
 *              of any association with framework requests
4672 *
4673 * PARAMETERS :
4674 * @internalFrame#: Output parameter which will have the newly generated internal
 *                 frame number
4676 *
4677 * RETURN : Error code
4678 *
4679 *==========================================================================*/
4680int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4681{
4682 Mutex::Autolock lock(mRegistryLock);
4683 internalFrameNumber = _nextFreeInternalNumber++;
4684 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4685 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4686 purgeOldEntriesLocked();
4687 return NO_ERROR;
4688}
4689
4690/*===========================================================================
4691 * FUNCTION : getFrameworkFrameNumber
4692 *
 * DESCRIPTION: Method to query the framework frame number given an internal one
4694 *
4695 * PARAMETERS :
4696 * @internalFrame#: Internal reference
4697 * @frameworkframenumber: Output parameter holding framework frame entry
4698 *
4699 * RETURN : Error code
4700 *
4701 *==========================================================================*/
4702int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4703 uint32_t &frameworkFrameNumber)
4704{
4705 Mutex::Autolock lock(mRegistryLock);
4706 auto itr = _register.find(internalFrameNumber);
4707 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004708 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004709 return -ENOENT;
4710 }
4711
4712 frameworkFrameNumber = itr->second;
4713 purgeOldEntriesLocked();
4714 return NO_ERROR;
4715}
Thierry Strudel3d639192016-09-09 11:52:26 -07004716
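/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a QCamera3
 *              channel's stream info: dimensions, format, per-plane stride and
 *              scanline, and the remaining frame padding.
 *
 * PARAMETERS : @config         : output stream configuration to fill
 *              @pbStreamId     : HDR+ stream id to assign
 *              @pbStreamFormat : HDR+ pixel format of the stream
 *              @channel        : channel that owns the stream
 *              @streamIndex    : index of the stream within the channel
 *
 * RETURN     : OK on success, error code otherwise
 *
 *==========================================================================*/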
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004717status_t QCamera3HardwareInterface::fillPbStreamConfig(
4718 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4719 QCamera3Channel *channel, uint32_t streamIndex) {
4720 if (config == nullptr) {
4721 LOGE("%s: config is null", __FUNCTION__);
4722 return BAD_VALUE;
4723 }
4724
4725 if (channel == nullptr) {
4726 LOGE("%s: channel is null", __FUNCTION__);
4727 return BAD_VALUE;
4728 }
4729
4730 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4731 if (stream == nullptr) {
4732 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4733 return NAME_NOT_FOUND;
4734 }
4735
4736 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4737 if (streamInfo == nullptr) {
4738 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4739 return NAME_NOT_FOUND;
4740 }
4741
4742 config->id = pbStreamId;
4743 config->image.width = streamInfo->dim.width;
4744 config->image.height = streamInfo->dim.height;
4745 config->image.padding = 0;
4746 config->image.format = pbStreamFormat;
4747
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004748 uint32_t totalPlaneSize = 0;
4749
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004750 // Fill plane information.
4751 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4752 pbcamera::PlaneConfiguration plane;
4753 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4754 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4755 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004756
4757 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004758 }
4759
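    // Any bytes in the frame beyond the summed plane sizes (stride x scanline per
    // plane) are reported as padding.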
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004760 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004761 return OK;
4762}
4763
Thierry Strudel3d639192016-09-09 11:52:26 -07004764/*===========================================================================
4765 * FUNCTION : processCaptureRequest
4766 *
4767 * DESCRIPTION: process a capture request from camera service
4768 *
4769 * PARAMETERS :
 * @request : request from framework to process
 * @internallyRequestedStreams : list of streams requested internally by the HAL
 *                               (e.g. for HDR snapshot bracketing)
4771 *
4772 * RETURN :
4773 *
4774 *==========================================================================*/
4775int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 camera3_capture_request_t *request,
4777 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004780 int rc = NO_ERROR;
4781 int32_t request_id;
4782 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 bool isVidBufRequested = false;
4784 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004785 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004786
4787 pthread_mutex_lock(&mMutex);
4788
4789 // Validate current state
4790 switch (mState) {
4791 case CONFIGURED:
4792 case STARTED:
4793 /* valid state */
4794 break;
4795
4796 case ERROR:
4797 pthread_mutex_unlock(&mMutex);
4798 handleCameraDeviceError();
4799 return -ENODEV;
4800
4801 default:
4802 LOGE("Invalid state %d", mState);
4803 pthread_mutex_unlock(&mMutex);
4804 return -ENODEV;
4805 }
4806
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004807 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004808 if (rc != NO_ERROR) {
4809 LOGE("incoming request is not valid");
4810 pthread_mutex_unlock(&mMutex);
4811 return rc;
4812 }
4813
4814 meta = request->settings;
4815
4816 // For first capture request, send capture intent, and
4817 // stream on all streams
4818 if (mState == CONFIGURED) {
4819 // send an unconfigure to the backend so that the isp
4820 // resources are deallocated
4821 if (!mFirstConfiguration) {
4822 cam_stream_size_info_t stream_config_info;
4823 int32_t hal_version = CAM_HAL_V3;
4824 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4825 stream_config_info.buffer_info.min_buffers =
4826 MIN_INFLIGHT_REQUESTS;
4827 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004828 m_bIs4KVideo ? 0 :
4829 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004830 clear_metadata_buffer(mParameters);
4831 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4832 CAM_INTF_PARM_HAL_VERSION, hal_version);
4833 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4834 CAM_INTF_META_STREAM_INFO, stream_config_info);
4835 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4836 mParameters);
4837 if (rc < 0) {
4838 LOGE("set_parms for unconfigure failed");
4839 pthread_mutex_unlock(&mMutex);
4840 return rc;
4841 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004842
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004844 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004845 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004846 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004848 property_get("persist.camera.is_type", is_type_value, "4");
4849 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4850 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4851 property_get("persist.camera.is_type_preview", is_type_value, "4");
4852 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4853 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004854
4855 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4856 int32_t hal_version = CAM_HAL_V3;
4857 uint8_t captureIntent =
4858 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4859 mCaptureIntent = captureIntent;
4860 clear_metadata_buffer(mParameters);
4861 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4862 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4863 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004864 if (mFirstConfiguration) {
4865 // configure instant AEC
4866 // Instant AEC is a session based parameter and it is needed only
4867 // once per complete session after open camera.
4868 // i.e. This is set only once for the first capture request, after open camera.
4869 setInstantAEC(meta);
4870 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004871 uint8_t fwkVideoStabMode=0;
4872 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4873 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4874 }
4875
Xue Tuecac74e2017-04-17 13:58:15 -07004876 // If EIS setprop is enabled then only turn it on for video/preview
4877 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 int32_t vsMode;
4880 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4881 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4882 rc = BAD_VALUE;
4883 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004884 LOGD("setEis %d", setEis);
4885 bool eis3Supported = false;
4886 size_t count = IS_TYPE_MAX;
4887 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4888 for (size_t i = 0; i < count; i++) {
4889 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4890 eis3Supported = true;
4891 break;
4892 }
4893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004894
        //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported
        //it could either be IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4898 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004899 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4900 is_type = isTypePreview;
4901 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4902 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
                    LOGW("EIS_3.0 is not supported, falling back to EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004905 } else {
4906 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004907 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004908 } else {
4909 is_type = IS_TYPE_NONE;
4910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4914 }
4915 }
4916
4917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4919
Thierry Strudel54dc9782017-02-15 12:12:10 -08004920 //Disable tintless only if the property is set to 0
4921 memset(prop, 0, sizeof(prop));
4922 property_get("persist.camera.tintless.enable", prop, "1");
4923 int32_t tintless_value = atoi(prop);
4924
Thierry Strudel3d639192016-09-09 11:52:26 -07004925 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4926 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004927
Thierry Strudel3d639192016-09-09 11:52:26 -07004928 //Disable CDS for HFR mode or if DIS/EIS is on.
4929 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4930 //after every configure_stream
4931 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4932 (m_bIsVideo)) {
4933 int32_t cds = CAM_CDS_MODE_OFF;
4934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4935 CAM_INTF_PARM_CDS_MODE, cds))
4936 LOGE("Failed to disable CDS for HFR mode");
4937
4938 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004939
4940 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4941 uint8_t* use_av_timer = NULL;
4942
4943 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004944 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 use_av_timer = &m_debug_avtimer;
4946 }
4947 else{
4948 use_av_timer =
4949 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004950 if (use_av_timer) {
4951 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4952 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004953 }
4954
4955 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4956 rc = BAD_VALUE;
4957 }
4958 }
4959
Thierry Strudel3d639192016-09-09 11:52:26 -07004960 setMobicat();
4961
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004962 uint8_t nrMode = 0;
4963 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4964 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4965 }
4966
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 /* Set fps and hfr mode while sending meta stream info so that sensor
4968 * can configure appropriate streaming mode */
4969 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004970 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4971 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004972 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4973 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004974 if (rc == NO_ERROR) {
4975 int32_t max_fps =
4976 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004977 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004978 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4979 }
4980 /* For HFR, more buffers are dequeued upfront to improve the performance */
4981 if (mBatchSize) {
4982 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4983 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4984 }
4985 }
4986 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004987 LOGE("setHalFpsRange failed");
4988 }
4989 }
4990 if (meta.exists(ANDROID_CONTROL_MODE)) {
4991 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4992 rc = extractSceneMode(meta, metaMode, mParameters);
4993 if (rc != NO_ERROR) {
4994 LOGE("extractSceneMode failed");
4995 }
4996 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004997 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004998
Thierry Strudel04e026f2016-10-10 11:27:36 -07004999 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5000 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5001 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5002 rc = setVideoHdrMode(mParameters, vhdr);
5003 if (rc != NO_ERROR) {
5004 LOGE("setVideoHDR is failed");
5005 }
5006 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005007
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005008 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005009 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005010 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005011 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5012 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5013 sensorModeFullFov)) {
5014 rc = BAD_VALUE;
5015 }
5016 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 //TODO: validate the arguments, HSV scenemode should have only the
5018 //advertised fps ranges
5019
5020 /*set the capture intent, hal version, tintless, stream info,
         *and DIS enable parameters to the backend*/
5022 LOGD("set_parms META_STREAM_INFO " );
5023 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005024 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5025 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 mStreamConfigInfo.type[i],
5027 mStreamConfigInfo.stream_sizes[i].width,
5028 mStreamConfigInfo.stream_sizes[i].height,
5029 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005030 mStreamConfigInfo.format[i],
5031 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005032 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005033
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5035 mParameters);
5036 if (rc < 0) {
5037 LOGE("set_parms failed for hal version, stream info");
5038 }
5039
Chien-Yu Chenee335912017-02-09 17:53:20 -08005040 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5041 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005042 if (rc != NO_ERROR) {
5043 LOGE("Failed to get sensor output size");
5044 pthread_mutex_unlock(&mMutex);
5045 goto error_exit;
5046 }
5047
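        // Update the crop region mapper with the full active-array size and the
        // active-array size of the selected sensor mode so per-request crop regions
        // can be translated between the two coordinate spaces.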
5048 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5049 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005050 mSensorModeInfo.active_array_size.width,
5051 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005052
5053 /* Set batchmode before initializing channel. Since registerBuffer
5054 * internally initializes some of the channels, better set batchmode
5055 * even before first register buffer */
5056 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5057 it != mStreamInfo.end(); it++) {
5058 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5059 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5060 && mBatchSize) {
5061 rc = channel->setBatchSize(mBatchSize);
5062 //Disable per frame map unmap for HFR/batchmode case
5063 rc |= channel->setPerFrameMapUnmap(false);
5064 if (NO_ERROR != rc) {
5065 LOGE("Channel init failed %d", rc);
5066 pthread_mutex_unlock(&mMutex);
5067 goto error_exit;
5068 }
5069 }
5070 }
5071
5072 //First initialize all streams
5073 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5074 it != mStreamInfo.end(); it++) {
5075 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005076
5077 /* Initial value of NR mode is needed before stream on */
5078 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005079 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5080 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005081 setEis) {
5082 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5083 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5084 is_type = mStreamConfigInfo.is_type[i];
5085 break;
5086 }
5087 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005088 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005089 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005090 rc = channel->initialize(IS_TYPE_NONE);
5091 }
5092 if (NO_ERROR != rc) {
5093 LOGE("Channel initialization failed %d", rc);
5094 pthread_mutex_unlock(&mMutex);
5095 goto error_exit;
5096 }
5097 }
5098
5099 if (mRawDumpChannel) {
5100 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5101 if (rc != NO_ERROR) {
5102 LOGE("Error: Raw Dump Channel init failed");
5103 pthread_mutex_unlock(&mMutex);
5104 goto error_exit;
5105 }
5106 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005107 if (mHdrPlusRawSrcChannel) {
5108 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5109 if (rc != NO_ERROR) {
5110 LOGE("Error: HDR+ RAW Source Channel init failed");
5111 pthread_mutex_unlock(&mMutex);
5112 goto error_exit;
5113 }
5114 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005115 if (mSupportChannel) {
5116 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5117 if (rc < 0) {
5118 LOGE("Support channel initialization failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
5123 if (mAnalysisChannel) {
5124 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5125 if (rc < 0) {
5126 LOGE("Analysis channel initialization failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
5130 }
5131 if (mDummyBatchChannel) {
5132 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5133 if (rc < 0) {
5134 LOGE("mDummyBatchChannel setBatchSize failed");
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005138 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005139 if (rc < 0) {
5140 LOGE("mDummyBatchChannel initialization failed");
5141 pthread_mutex_unlock(&mMutex);
5142 goto error_exit;
5143 }
5144 }
5145
5146 // Set bundle info
5147 rc = setBundleInfo();
5148 if (rc < 0) {
5149 LOGE("setBundleInfo failed %d", rc);
5150 pthread_mutex_unlock(&mMutex);
5151 goto error_exit;
5152 }
5153
5154 //update settings from app here
5155 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5156 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5157 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5158 }
5159 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5160 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5161 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5162 }
5163 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5164 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5165 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5166
5167 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5168 (mLinkedCameraId != mCameraId) ) {
5169 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5170 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005171 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005172 goto error_exit;
5173 }
5174 }
5175
5176 // add bundle related cameras
5177 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5178 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005179 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5180 &m_pDualCamCmdPtr->bundle_info;
5181 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005182 if (mIsDeviceLinked)
5183 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5184 else
5185 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5186
5187 pthread_mutex_lock(&gCamLock);
5188
5189 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5190 LOGE("Dualcam: Invalid Session Id ");
5191 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005192 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 goto error_exit;
5194 }
5195
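            // Populate role-specific bundle fields: the main camera acts as
            // PRIMARY/BAYER and the linked camera as AUX/MONO; each side references
            // the peer's session id so the backend can synchronize 3A.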
5196 if (mIsMainCamera == 1) {
5197 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5198 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005199 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005200 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 // related session id should be session id of linked session
5202 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5203 } else {
5204 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5205 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005206 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005207 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5209 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005210 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005211 pthread_mutex_unlock(&gCamLock);
5212
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005213 rc = mCameraHandle->ops->set_dual_cam_cmd(
5214 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005215 if (rc < 0) {
5216 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005217 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 goto error_exit;
5219 }
5220 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005221 goto no_error;
5222error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005223 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 return rc;
5225no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 mWokenUpByDaemon = false;
5227 mPendingLiveRequest = 0;
5228 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005229 }
5230
5231 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005232 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005233
5234 if (mFlushPerf) {
5235 //we cannot accept any requests during flush
5236 LOGE("process_capture_request cannot proceed during flush");
5237 pthread_mutex_unlock(&mMutex);
5238 return NO_ERROR; //should return an error
5239 }
5240
5241 if (meta.exists(ANDROID_REQUEST_ID)) {
5242 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5243 mCurrentRequestId = request_id;
5244 LOGD("Received request with id: %d", request_id);
5245 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
        LOGE("Unable to find request id field, "
                "& no previous id available");
5248 pthread_mutex_unlock(&mMutex);
5249 return NAME_NOT_FOUND;
5250 } else {
5251 LOGD("Re-using old request id");
5252 request_id = mCurrentRequestId;
5253 }
5254
5255 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5256 request->num_output_buffers,
5257 request->input_buffer,
5258 frameNumber);
5259 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005260 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005262 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 uint32_t snapshotStreamId = 0;
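    // Walk the requested output buffers: wait on and close acquire fences, note
    // blob (JPEG) and depth requests, and collect the stream IDs that need to be
    // requested from the backend.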
5264 for (size_t i = 0; i < request->num_output_buffers; i++) {
5265 const camera3_stream_buffer_t& output = request->output_buffers[i];
5266 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5267
Emilian Peev7650c122017-01-19 08:24:33 -08005268 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5269 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005270 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005271 blob_request = 1;
5272 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5273 }
5274
5275 if (output.acquire_fence != -1) {
5276 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5277 close(output.acquire_fence);
5278 if (rc != OK) {
5279 LOGE("sync wait failed %d", rc);
5280 pthread_mutex_unlock(&mMutex);
5281 return rc;
5282 }
5283 }
5284
Emilian Peev0f3c3162017-03-15 12:57:46 +00005285 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5286 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005287 depthRequestPresent = true;
5288 continue;
5289 }
5290
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005291 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005292 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005293
5294 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5295 isVidBufRequested = true;
5296 }
5297 }
5298
    //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5300 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5301 itr++) {
5302 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5303 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5304 channel->getStreamID(channel->getStreamTypeMask());
5305
5306 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5307 isVidBufRequested = true;
5308 }
5309 }
5310
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005312 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005313 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 }
5315 if (blob_request && mRawDumpChannel) {
5316 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005317 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005318 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005319 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 }
5321
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005322 {
5323 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5324 // Request a RAW buffer if
5325 // 1. mHdrPlusRawSrcChannel is valid.
5326 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5327 // 3. There is no pending HDR+ request.
5328 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5329 mHdrPlusPendingRequests.size() == 0) {
5330 streamsArray.stream_request[streamsArray.num_streams].streamID =
5331 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5332 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5333 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005334 }
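    // Illustrative note (period value assumed, not from this excerpt): if
    // kHdrPlusRawPeriod were 4, only every 4th frame number would append the
    // HDR+ RAW stream above, throttling the RAW capture rate as described.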
5335
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005336 //extract capture intent
5337 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5338 mCaptureIntent =
5339 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5340 }
5341
5342 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5343 mCacMode =
5344 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5345 }
5346
5347 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005348 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005349
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005350 {
5351 Mutex::Autolock l(gHdrPlusClientLock);
5352 // If this request has a still capture intent, try to submit an HDR+ request.
5353 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5354 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5355 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5356 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005357 }
5358
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005359 if (hdrPlusRequest) {
5360 // For a HDR+ request, just set the frame parameters.
5361 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5362 if (rc < 0) {
5363 LOGE("fail to set frame parameters");
5364 pthread_mutex_unlock(&mMutex);
5365 return rc;
5366 }
5367 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005368 /* Parse the settings:
5369 * - For every request in NORMAL MODE
5370 * - For every request in HFR mode during preview only case
5371 * - For first request of every batch in HFR mode during video
5372 * recording. In batchmode the same settings except frame number is
5373 * repeated in each request of the batch.
5374 */
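        // Note on the check below: !mBatchSize is the normal (non-batch) case;
        // (mBatchSize && !isVidBufRequested) is an HFR request with no video
        // buffer (preview only); and !mToBeQueuedVidBufs marks the first
        // request of a new batch during HFR video recording.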
5375 if (!mBatchSize ||
5376 (mBatchSize && !isVidBufRequested) ||
5377 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005378 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005379 if (rc < 0) {
5380 LOGE("fail to set frame parameters");
5381 pthread_mutex_unlock(&mMutex);
5382 return rc;
5383 }
5384 }
        /* For batchMode HFR, setFrameParameters is not called for every
         * request; only the frame number of the latest request is parsed.
         * Keep track of the first and last frame numbers in a batch so that
         * metadata for all frame numbers of the batch can be duplicated in
         * handleBatchMetadata */
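        // Illustrative example (values assumed, not from this code): with
        // mBatchSize == 4 and a batch covering frame numbers 100..103,
        // mFirstFrameNumberInBatch is latched at 100 when the batch starts,
        // and handleBatchMetadata later uses it to duplicate the batch
        // metadata across frames 100..103.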
5390 if (mBatchSize) {
5391 if (!mToBeQueuedVidBufs) {
5392 //start of the batch
5393 mFirstFrameNumberInBatch = request->frame_number;
5394 }
5395 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5396 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5397 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005398 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005399 return BAD_VALUE;
5400 }
5401 }
5402 if (mNeedSensorRestart) {
5403 /* Unlock the mutex as restartSensor waits on the channels to be
5404 * stopped, which in turn calls stream callback functions -
5405 * handleBufferWithLock and handleMetadataWithLock */
5406 pthread_mutex_unlock(&mMutex);
5407 rc = dynamicUpdateMetaStreamInfo();
5408 if (rc != NO_ERROR) {
5409 LOGE("Restarting the sensor failed");
5410 return BAD_VALUE;
5411 }
5412 mNeedSensorRestart = false;
5413 pthread_mutex_lock(&mMutex);
5414 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005415 if(mResetInstantAEC) {
5416 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5417 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5418 mResetInstantAEC = false;
5419 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005420 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 if (request->input_buffer->acquire_fence != -1) {
5422 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5423 close(request->input_buffer->acquire_fence);
5424 if (rc != OK) {
5425 LOGE("input buffer sync wait failed %d", rc);
5426 pthread_mutex_unlock(&mMutex);
5427 return rc;
5428 }
5429 }
5430 }
5431
5432 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5433 mLastCustIntentFrmNum = frameNumber;
5434 }
5435 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005436 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005437 pendingRequestIterator latestRequest;
5438 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005439 pendingRequest.num_buffers = depthRequestPresent ?
5440 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005441 pendingRequest.request_id = request_id;
5442 pendingRequest.blob_request = blob_request;
5443 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 if (request->input_buffer) {
5445 pendingRequest.input_buffer =
5446 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5447 *(pendingRequest.input_buffer) = *(request->input_buffer);
5448 pInputBuffer = pendingRequest.input_buffer;
5449 } else {
5450 pendingRequest.input_buffer = NULL;
5451 pInputBuffer = NULL;
5452 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005453 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
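    // Note: only the very first request after stream configuration, and only
    // when it is not a reprocess (input buffer) request, uses the first
    // partial result.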
Thierry Strudel3d639192016-09-09 11:52:26 -07005454
5455 pendingRequest.pipeline_depth = 0;
5456 pendingRequest.partial_result_cnt = 0;
5457 extractJpegMetadata(mCurJpegMeta, request);
5458 pendingRequest.jpegMetadata = mCurJpegMeta;
5459 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5460 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005461 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005462 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5463 mHybridAeEnable =
5464 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5465 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005466
5467 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5468 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005469 /* DevCamDebug metadata processCaptureRequest */
5470 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5471 mDevCamDebugMetaEnable =
5472 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5473 }
5474 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5475 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005476
5477 //extract CAC info
5478 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5479 mCacMode =
5480 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5481 }
5482 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005483 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005484
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005485 // extract enableZsl info
5486 if (gExposeEnableZslKey) {
5487 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5488 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5489 mZslEnabled = pendingRequest.enableZsl;
5490 } else {
5491 pendingRequest.enableZsl = mZslEnabled;
5492 }
5493 }
5494
Thierry Strudel3d639192016-09-09 11:52:26 -07005495 PendingBuffersInRequest bufsForCurRequest;
5496 bufsForCurRequest.frame_number = frameNumber;
5497 // Mark current timestamp for the new request
5498 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005499 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005500
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005501 if (hdrPlusRequest) {
5502 // Save settings for this request.
5503 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5504 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5505
5506 // Add to pending HDR+ request queue.
5507 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5508 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5509
5510 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5511 }
5512
Thierry Strudel3d639192016-09-09 11:52:26 -07005513 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005514 if ((request->output_buffers[i].stream->data_space ==
5515 HAL_DATASPACE_DEPTH) &&
5516 (HAL_PIXEL_FORMAT_BLOB ==
5517 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005518 continue;
5519 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005520 RequestedBufferInfo requestedBuf;
5521 memset(&requestedBuf, 0, sizeof(requestedBuf));
5522 requestedBuf.stream = request->output_buffers[i].stream;
5523 requestedBuf.buffer = NULL;
5524 pendingRequest.buffers.push_back(requestedBuf);
5525
        // Add the buffer handle to the pending buffers list
5527 PendingBufferInfo bufferInfo;
5528 bufferInfo.buffer = request->output_buffers[i].buffer;
5529 bufferInfo.stream = request->output_buffers[i].stream;
5530 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5531 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5532 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5533 frameNumber, bufferInfo.buffer,
5534 channel->getStreamTypeMask(), bufferInfo.stream->format);
5535 }
5536 // Add this request packet into mPendingBuffersMap
5537 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5538 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5539 mPendingBuffersMap.get_num_overall_buffers());
5540
5541 latestRequest = mPendingRequestsList.insert(
5542 mPendingRequestsList.end(), pendingRequest);
5543 if(mFlush) {
5544 LOGI("mFlush is true");
5545 pthread_mutex_unlock(&mMutex);
5546 return NO_ERROR;
5547 }
5548
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005549 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5550 // channel.
5551 if (!hdrPlusRequest) {
5552 int indexUsed;
5553 // Notify metadata channel we receive a request
5554 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005555
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005556 if(request->input_buffer != NULL){
5557 LOGD("Input request, frame_number %d", frameNumber);
5558 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5559 if (NO_ERROR != rc) {
5560 LOGE("fail to set reproc parameters");
5561 pthread_mutex_unlock(&mMutex);
5562 return rc;
5563 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005564 }
5565
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005566 // Call request on other streams
5567 uint32_t streams_need_metadata = 0;
5568 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5569 for (size_t i = 0; i < request->num_output_buffers; i++) {
5570 const camera3_stream_buffer_t& output = request->output_buffers[i];
5571 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5572
5573 if (channel == NULL) {
5574 LOGW("invalid channel pointer for stream");
5575 continue;
5576 }
5577
5578 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5579 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5580 output.buffer, request->input_buffer, frameNumber);
5581 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005582 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005583 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5584 if (rc < 0) {
5585 LOGE("Fail to request on picture channel");
5586 pthread_mutex_unlock(&mMutex);
5587 return rc;
5588 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005589 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005590 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5591 assert(NULL != mDepthChannel);
5592 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005593
Emilian Peev7650c122017-01-19 08:24:33 -08005594 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5595 if (rc < 0) {
5596 LOGE("Fail to map on depth buffer");
5597 pthread_mutex_unlock(&mMutex);
5598 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005599 }
Emilian Peev7650c122017-01-19 08:24:33 -08005600 } else {
5601 LOGD("snapshot request with buffer %p, frame_number %d",
5602 output.buffer, frameNumber);
5603 if (!request->settings) {
5604 rc = channel->request(output.buffer, frameNumber,
5605 NULL, mPrevParameters, indexUsed);
5606 } else {
5607 rc = channel->request(output.buffer, frameNumber,
5608 NULL, mParameters, indexUsed);
5609 }
5610 if (rc < 0) {
5611 LOGE("Fail to request on picture channel");
5612 pthread_mutex_unlock(&mMutex);
5613 return rc;
5614 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615
Emilian Peev7650c122017-01-19 08:24:33 -08005616 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5617 uint32_t j = 0;
5618 for (j = 0; j < streamsArray.num_streams; j++) {
5619 if (streamsArray.stream_request[j].streamID == streamId) {
5620 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5621 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5622 else
5623 streamsArray.stream_request[j].buf_index = indexUsed;
5624 break;
5625 }
5626 }
5627 if (j == streamsArray.num_streams) {
5628 LOGE("Did not find matching stream to update index");
5629 assert(0);
5630 }
5631
5632 pendingBufferIter->need_metadata = true;
5633 streams_need_metadata++;
5634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005635 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005636 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5637 bool needMetadata = false;
5638 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5639 rc = yuvChannel->request(output.buffer, frameNumber,
5640 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5641 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005642 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 pthread_mutex_unlock(&mMutex);
5645 return rc;
5646 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005647
5648 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5649 uint32_t j = 0;
5650 for (j = 0; j < streamsArray.num_streams; j++) {
5651 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005652 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5653 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5654 else
5655 streamsArray.stream_request[j].buf_index = indexUsed;
5656 break;
5657 }
5658 }
5659 if (j == streamsArray.num_streams) {
5660 LOGE("Did not find matching stream to update index");
5661 assert(0);
5662 }
5663
5664 pendingBufferIter->need_metadata = needMetadata;
5665 if (needMetadata)
5666 streams_need_metadata += 1;
5667 LOGD("calling YUV channel request, need_metadata is %d",
5668 needMetadata);
5669 } else {
5670 LOGD("request with buffer %p, frame_number %d",
5671 output.buffer, frameNumber);
5672
5673 rc = channel->request(output.buffer, frameNumber, indexUsed);
5674
5675 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5676 uint32_t j = 0;
5677 for (j = 0; j < streamsArray.num_streams; j++) {
5678 if (streamsArray.stream_request[j].streamID == streamId) {
5679 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5680 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5681 else
5682 streamsArray.stream_request[j].buf_index = indexUsed;
5683 break;
5684 }
5685 }
5686 if (j == streamsArray.num_streams) {
5687 LOGE("Did not find matching stream to update index");
5688 assert(0);
5689 }
5690
5691 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5692 && mBatchSize) {
5693 mToBeQueuedVidBufs++;
5694 if (mToBeQueuedVidBufs == mBatchSize) {
5695 channel->queueBatchBuf();
5696 }
5697 }
5698 if (rc < 0) {
5699 LOGE("request failed");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
5703 }
5704 pendingBufferIter++;
5705 }
5706
5707 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5708 itr++) {
5709 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5710
5711 if (channel == NULL) {
5712 LOGE("invalid channel pointer for stream");
5713 assert(0);
5714 return BAD_VALUE;
5715 }
5716
5717 InternalRequest requestedStream;
5718 requestedStream = (*itr);
5719
5720
5721 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5722 LOGD("snapshot request internally input buffer %p, frame_number %d",
5723 request->input_buffer, frameNumber);
5724 if(request->input_buffer != NULL){
5725 rc = channel->request(NULL, frameNumber,
5726 pInputBuffer, &mReprocMeta, indexUsed, true,
5727 requestedStream.meteringOnly);
5728 if (rc < 0) {
5729 LOGE("Fail to request on picture channel");
5730 pthread_mutex_unlock(&mMutex);
5731 return rc;
5732 }
5733 } else {
5734 LOGD("snapshot request with frame_number %d", frameNumber);
5735 if (!request->settings) {
5736 rc = channel->request(NULL, frameNumber,
5737 NULL, mPrevParameters, indexUsed, true,
5738 requestedStream.meteringOnly);
5739 } else {
5740 rc = channel->request(NULL, frameNumber,
5741 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5742 }
5743 if (rc < 0) {
5744 LOGE("Fail to request on picture channel");
5745 pthread_mutex_unlock(&mMutex);
5746 return rc;
5747 }
5748
5749 if ((*itr).meteringOnly != 1) {
5750 requestedStream.need_metadata = 1;
5751 streams_need_metadata++;
5752 }
5753 }
5754
5755 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5756 uint32_t j = 0;
5757 for (j = 0; j < streamsArray.num_streams; j++) {
5758 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005759 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5760 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5761 else
5762 streamsArray.stream_request[j].buf_index = indexUsed;
5763 break;
5764 }
5765 }
5766 if (j == streamsArray.num_streams) {
5767 LOGE("Did not find matching stream to update index");
5768 assert(0);
5769 }
5770
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005771 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005772 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005773 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005774 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005775 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005776 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 //If 2 streams have need_metadata set to true, fail the request, unless
5780 //we copy/reference count the metadata buffer
5781 if (streams_need_metadata > 1) {
        LOGE("not supporting a request in which two streams require"
                " 2 HAL metadata buffers for reprocessing");
5784 pthread_mutex_unlock(&mMutex);
5785 return -EINVAL;
5786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005787
Emilian Peev7650c122017-01-19 08:24:33 -08005788 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5790 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5791 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5792 pthread_mutex_unlock(&mMutex);
5793 return BAD_VALUE;
5794 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005795 if (request->input_buffer == NULL) {
5796 /* Set the parameters to backend:
5797 * - For every request in NORMAL MODE
5798 * - For every request in HFR mode during preview only case
5799 * - Once every batch in HFR mode during video recording
5800 */
5801 if (!mBatchSize ||
5802 (mBatchSize && !isVidBufRequested) ||
5803 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5804 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5805 mBatchSize, isVidBufRequested,
5806 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005807
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005808 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
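                // Note: merge this request's stream IDs into
                // mBatchedStreamsArray so that the single set_parms issued at
                // the end of the batch covers every stream requested in it.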
5809 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5810 uint32_t m = 0;
5811 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5812 if (streamsArray.stream_request[k].streamID ==
5813 mBatchedStreamsArray.stream_request[m].streamID)
5814 break;
5815 }
5816 if (m == mBatchedStreamsArray.num_streams) {
5817 mBatchedStreamsArray.stream_request\
5818 [mBatchedStreamsArray.num_streams].streamID =
5819 streamsArray.stream_request[k].streamID;
5820 mBatchedStreamsArray.stream_request\
5821 [mBatchedStreamsArray.num_streams].buf_index =
5822 streamsArray.stream_request[k].buf_index;
5823 mBatchedStreamsArray.num_streams =
5824 mBatchedStreamsArray.num_streams + 1;
5825 }
5826 }
5827 streamsArray = mBatchedStreamsArray;
5828 }
5829 /* Update stream id of all the requested buffers */
5830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5831 streamsArray)) {
5832 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005833 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005834 return BAD_VALUE;
5835 }
5836
5837 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5838 mParameters);
5839 if (rc < 0) {
5840 LOGE("set_parms failed");
5841 }
            /* reset to zero because the batch is queued */
5843 mToBeQueuedVidBufs = 0;
5844 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5845 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5846 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005847 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5848 uint32_t m = 0;
5849 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5850 if (streamsArray.stream_request[k].streamID ==
5851 mBatchedStreamsArray.stream_request[m].streamID)
5852 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005853 }
5854 if (m == mBatchedStreamsArray.num_streams) {
5855 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5856 streamID = streamsArray.stream_request[k].streamID;
5857 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5858 buf_index = streamsArray.stream_request[k].buf_index;
5859 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5860 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005861 }
5862 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005863 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005864
5865 // Start all streams after the first setting is sent, so that the
5866 // setting can be applied sooner: (0 + apply_delay)th frame.
5867 if (mState == CONFIGURED && mChannelHandle) {
5868 //Then start them.
5869 LOGH("Start META Channel");
5870 rc = mMetadataChannel->start();
5871 if (rc < 0) {
5872 LOGE("META channel start failed");
5873 pthread_mutex_unlock(&mMutex);
5874 return rc;
5875 }
5876
5877 if (mAnalysisChannel) {
5878 rc = mAnalysisChannel->start();
5879 if (rc < 0) {
5880 LOGE("Analysis channel start failed");
5881 mMetadataChannel->stop();
5882 pthread_mutex_unlock(&mMutex);
5883 return rc;
5884 }
5885 }
5886
5887 if (mSupportChannel) {
5888 rc = mSupportChannel->start();
5889 if (rc < 0) {
5890 LOGE("Support channel start failed");
5891 mMetadataChannel->stop();
                    /* Although support and analysis are mutually exclusive today,
                       adding it in any case for future proofing */
5894 if (mAnalysisChannel) {
5895 mAnalysisChannel->stop();
5896 }
5897 pthread_mutex_unlock(&mMutex);
5898 return rc;
5899 }
5900 }
5901 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5902 it != mStreamInfo.end(); it++) {
5903 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5904 LOGH("Start Processing Channel mask=%d",
5905 channel->getStreamTypeMask());
5906 rc = channel->start();
5907 if (rc < 0) {
5908 LOGE("channel start failed");
5909 pthread_mutex_unlock(&mMutex);
5910 return rc;
5911 }
5912 }
5913
5914 if (mRawDumpChannel) {
5915 LOGD("Starting raw dump stream");
5916 rc = mRawDumpChannel->start();
5917 if (rc != NO_ERROR) {
5918 LOGE("Error Starting Raw Dump Channel");
5919 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5920 it != mStreamInfo.end(); it++) {
5921 QCamera3Channel *channel =
5922 (QCamera3Channel *)(*it)->stream->priv;
5923 LOGH("Stopping Processing Channel mask=%d",
5924 channel->getStreamTypeMask());
5925 channel->stop();
5926 }
5927 if (mSupportChannel)
5928 mSupportChannel->stop();
5929 if (mAnalysisChannel) {
5930 mAnalysisChannel->stop();
5931 }
5932 mMetadataChannel->stop();
5933 pthread_mutex_unlock(&mMutex);
5934 return rc;
5935 }
5936 }
5937
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005938 // Configure modules for stream on.
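                // Note: stream-on here is split into two phases: channels are
                // started first with sensor streaming disabled, Easel MIPI is
                // configured below (when present), and only then is sensor
                // streaming started.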
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005939 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005940 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005941 if (rc != NO_ERROR) {
5942 LOGE("start_channel failed %d", rc);
5943 pthread_mutex_unlock(&mMutex);
5944 return rc;
5945 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005946
5947 {
5948 // Configure Easel for stream on.
5949 Mutex::Autolock l(gHdrPlusClientLock);
5950 if (EaselManagerClientOpened) {
5951 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5952 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5953 if (rc != OK) {
5954 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5955 mCameraId, mSensorModeInfo.op_pixel_clk);
5956 pthread_mutex_unlock(&mMutex);
5957 return rc;
5958 }
5959 }
5960 }
5961
5962 // Start sensor streaming.
5963 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5964 mChannelHandle);
5965 if (rc != NO_ERROR) {
5966 LOGE("start_sensor_stream_on failed %d", rc);
5967 pthread_mutex_unlock(&mMutex);
5968 return rc;
5969 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005970 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005971 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005972 }
5973
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005974 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5975 {
5976 Mutex::Autolock l(gHdrPlusClientLock);
5977 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5978 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5979 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5980 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5981 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5982 rc = enableHdrPlusModeLocked();
5983 if (rc != OK) {
5984 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5985 pthread_mutex_unlock(&mMutex);
5986 return rc;
5987 }
5988
5989 mFirstPreviewIntentSeen = true;
5990 }
5991 }
5992
Thierry Strudel3d639192016-09-09 11:52:26 -07005993 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5994
5995 mState = STARTED;
5996 // Added a timed condition wait
5997 struct timespec ts;
5998 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005999 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006000 if (rc < 0) {
6001 isValidTimeout = 0;
        LOGE("Error reading the monotonic clock!!");
6003 }
6004 else {
6005 // Make timeout as 5 sec for request to be honored
        // Make the timeout 5 sec for the request to be honored
6007 {
6008 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6009 // If there is a pending HDR+ request, the following requests may be blocked until the
6010 // HDR+ request is done. So allow a longer timeout.
6011 if (mHdrPlusPendingRequests.size() > 0) {
6012 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6013 }
6014 }
6015 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006016 }
6017 //Block on conditional variable
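    // Note: this wait throttles process_capture_request; it blocks while
    // mPendingLiveRequest is at or above mMinInFlightRequests (and this is not
    // an input/reprocess request), until results return or the timed wait
    // above expires.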
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006018 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006019 (mState != ERROR) && (mState != DEINIT)) {
6020 if (!isValidTimeout) {
6021 LOGD("Blocking on conditional wait");
6022 pthread_cond_wait(&mRequestCond, &mMutex);
6023 }
6024 else {
6025 LOGD("Blocking on timed conditional wait");
6026 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6027 if (rc == ETIMEDOUT) {
6028 rc = -ENODEV;
6029 LOGE("Unblocked on timeout!!!!");
6030 break;
6031 }
6032 }
6033 LOGD("Unblocked");
6034 if (mWokenUpByDaemon) {
6035 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006036 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006037 break;
6038 }
6039 }
6040 pthread_mutex_unlock(&mMutex);
6041
6042 return rc;
6043}
6044
6045/*===========================================================================
6046 * FUNCTION : dump
6047 *
 * DESCRIPTION: Dump the current HAL3 state (pending requests, pending
 *              buffers and pending frame drops) to the given file descriptor;
 *              also flags a debug-level update (see the dumpsys note below)
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump into
 *
 * RETURN     : None
6054 *==========================================================================*/
6055void QCamera3HardwareInterface::dump(int fd)
6056{
6057 pthread_mutex_lock(&mMutex);
6058 dprintf(fd, "\n Camera HAL3 information Begin \n");
6059
6060 dprintf(fd, "\nNumber of pending requests: %zu \n",
6061 mPendingRequestsList.size());
6062 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6063 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6064 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6065 for(pendingRequestIterator i = mPendingRequestsList.begin();
6066 i != mPendingRequestsList.end(); i++) {
6067 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6068 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6069 i->input_buffer);
6070 }
6071 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6072 mPendingBuffersMap.get_num_overall_buffers());
6073 dprintf(fd, "-------+------------------\n");
6074 dprintf(fd, " Frame | Stream type mask \n");
6075 dprintf(fd, "-------+------------------\n");
6076 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6077 for(auto &j : req.mPendingBufferList) {
6078 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6079 dprintf(fd, " %5d | %11d \n",
6080 req.frame_number, channel->getStreamTypeMask());
6081 }
6082 }
6083 dprintf(fd, "-------+------------------\n");
6084
6085 dprintf(fd, "\nPending frame drop list: %zu\n",
6086 mPendingFrameDropList.size());
6087 dprintf(fd, "-------+-----------\n");
6088 dprintf(fd, " Frame | Stream ID \n");
6089 dprintf(fd, "-------+-----------\n");
6090 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6091 i != mPendingFrameDropList.end(); i++) {
6092 dprintf(fd, " %5d | %9d \n",
6093 i->frame_number, i->stream_ID);
6094 }
6095 dprintf(fd, "-------+-----------\n");
6096
6097 dprintf(fd, "\n Camera HAL3 information End \n");
6098
6099 /* use dumpsys media.camera as trigger to send update debug level event */
6100 mUpdateDebugLevel = true;
6101 pthread_mutex_unlock(&mMutex);
6102 return;
6103}
6104
6105/*===========================================================================
6106 * FUNCTION : flush
6107 *
6108 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6109 * conditionally restarts channels
6110 *
6111 * PARAMETERS :
6112 * @ restartChannels: re-start all channels
6113 *
6114 *
6115 * RETURN :
6116 * 0 on success
6117 * Error code on failure
6118 *==========================================================================*/
6119int QCamera3HardwareInterface::flush(bool restartChannels)
6120{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006121 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006122 int32_t rc = NO_ERROR;
6123
6124 LOGD("Unblocking Process Capture Request");
6125 pthread_mutex_lock(&mMutex);
6126 mFlush = true;
6127 pthread_mutex_unlock(&mMutex);
6128
6129 rc = stopAllChannels();
6130 // unlink of dualcam
6131 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006132 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6133 &m_pDualCamCmdPtr->bundle_info;
6134 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006135 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6136 pthread_mutex_lock(&gCamLock);
6137
6138 if (mIsMainCamera == 1) {
6139 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6140 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006141 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006142 // related session id should be session id of linked session
6143 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6144 } else {
6145 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6146 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006147 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006148 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6149 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006150 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006151 pthread_mutex_unlock(&gCamLock);
6152
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006153 rc = mCameraHandle->ops->set_dual_cam_cmd(
6154 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 if (rc < 0) {
6156 LOGE("Dualcam: Unlink failed, but still proceed to close");
6157 }
6158 }
6159
6160 if (rc < 0) {
6161 LOGE("stopAllChannels failed");
6162 return rc;
6163 }
6164 if (mChannelHandle) {
6165 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6166 mChannelHandle);
6167 }
6168
6169 // Reset bundle info
6170 rc = setBundleInfo();
6171 if (rc < 0) {
6172 LOGE("setBundleInfo failed %d", rc);
6173 return rc;
6174 }
6175
6176 // Mutex Lock
6177 pthread_mutex_lock(&mMutex);
6178
6179 // Unblock process_capture_request
6180 mPendingLiveRequest = 0;
6181 pthread_cond_signal(&mRequestCond);
6182
6183 rc = notifyErrorForPendingRequests();
6184 if (rc < 0) {
6185 LOGE("notifyErrorForPendingRequests failed");
6186 pthread_mutex_unlock(&mMutex);
6187 return rc;
6188 }
6189
6190 mFlush = false;
6191
6192 // Start the Streams/Channels
6193 if (restartChannels) {
6194 rc = startAllChannels();
6195 if (rc < 0) {
6196 LOGE("startAllChannels failed");
6197 pthread_mutex_unlock(&mMutex);
6198 return rc;
6199 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006200 if (mChannelHandle) {
6201 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006202 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006203 if (rc < 0) {
6204 LOGE("start_channel failed");
6205 pthread_mutex_unlock(&mMutex);
6206 return rc;
6207 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006208 }
6209 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006210 pthread_mutex_unlock(&mMutex);
6211
6212 return 0;
6213}
6214
6215/*===========================================================================
6216 * FUNCTION : flushPerf
6217 *
 * DESCRIPTION: This is the performance-optimized version of flush: it does
 *              not stream off the channels; instead it flushes the backend
 *              and waits for the pending buffers to be returned
6220 *
6221 * PARAMETERS :
6222 *
6223 *
6224 * RETURN : 0 : success
6225 * -EINVAL: input is malformed (device is not valid)
6226 * -ENODEV: if the device has encountered a serious error
6227 *==========================================================================*/
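// Note: flushPerf sends a flush command to the backend, waits (bounded by
// FLUSH_TIMEOUT when the monotonic clock read succeeds) for all pending
// buffers to be returned, lets each channel flush its resources, and then
// reports errors for any remaining pending requests.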
6228int QCamera3HardwareInterface::flushPerf()
6229{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006230 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006231 int32_t rc = 0;
6232 struct timespec timeout;
6233 bool timed_wait = false;
6234
6235 pthread_mutex_lock(&mMutex);
6236 mFlushPerf = true;
6237 mPendingBuffersMap.numPendingBufsAtFlush =
6238 mPendingBuffersMap.get_num_overall_buffers();
6239 LOGD("Calling flush. Wait for %d buffers to return",
6240 mPendingBuffersMap.numPendingBufsAtFlush);
6241
6242 /* send the flush event to the backend */
6243 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6244 if (rc < 0) {
6245 LOGE("Error in flush: IOCTL failure");
6246 mFlushPerf = false;
6247 pthread_mutex_unlock(&mMutex);
6248 return -ENODEV;
6249 }
6250
6251 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6252 LOGD("No pending buffers in HAL, return flush");
6253 mFlushPerf = false;
6254 pthread_mutex_unlock(&mMutex);
6255 return rc;
6256 }
6257
6258 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006259 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006260 if (rc < 0) {
        LOGE("Error reading the monotonic clock, cannot use timed wait");
6262 } else {
6263 timeout.tv_sec += FLUSH_TIMEOUT;
6264 timed_wait = true;
6265 }
6266
6267 //Block on conditional variable
6268 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6269 LOGD("Waiting on mBuffersCond");
6270 if (!timed_wait) {
6271 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6272 if (rc != 0) {
6273 LOGE("pthread_cond_wait failed due to rc = %s",
6274 strerror(rc));
6275 break;
6276 }
6277 } else {
6278 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6279 if (rc != 0) {
6280 LOGE("pthread_cond_timedwait failed due to rc = %s",
6281 strerror(rc));
6282 break;
6283 }
6284 }
6285 }
6286 if (rc != 0) {
6287 mFlushPerf = false;
6288 pthread_mutex_unlock(&mMutex);
6289 return -ENODEV;
6290 }
6291
6292 LOGD("Received buffers, now safe to return them");
6293
6294 //make sure the channels handle flush
6295 //currently only required for the picture channel to release snapshot resources
6296 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6297 it != mStreamInfo.end(); it++) {
6298 QCamera3Channel *channel = (*it)->channel;
6299 if (channel) {
6300 rc = channel->flush();
6301 if (rc) {
6302 LOGE("Flushing the channels failed with error %d", rc);
                // Even though the channel flush failed, we need to continue and
                // return the buffers we have to the framework; however, the
                // return value will be an error.
6306 rc = -ENODEV;
6307 }
6308 }
6309 }
6310
6311 /* notify the frameworks and send errored results */
6312 rc = notifyErrorForPendingRequests();
6313 if (rc < 0) {
6314 LOGE("notifyErrorForPendingRequests failed");
6315 pthread_mutex_unlock(&mMutex);
6316 return rc;
6317 }
6318
6319 //unblock process_capture_request
6320 mPendingLiveRequest = 0;
6321 unblockRequestIfNecessary();
6322
6323 mFlushPerf = false;
6324 pthread_mutex_unlock(&mMutex);
6325 LOGD ("Flush Operation complete. rc = %d", rc);
6326 return rc;
6327}
6328
6329/*===========================================================================
6330 * FUNCTION : handleCameraDeviceError
6331 *
 * DESCRIPTION: This function calls internal flush, notifies the error to the
 *              framework and updates the state variable.
6334 *
6335 * PARAMETERS : None
6336 *
6337 * RETURN : NO_ERROR on Success
6338 * Error code on failure
6339 *==========================================================================*/
6340int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6341{
6342 int32_t rc = NO_ERROR;
6343
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006344 {
6345 Mutex::Autolock lock(mFlushLock);
6346 pthread_mutex_lock(&mMutex);
6347 if (mState != ERROR) {
6348 //if mState != ERROR, nothing to be done
6349 pthread_mutex_unlock(&mMutex);
6350 return NO_ERROR;
6351 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006352 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006353
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006354 rc = flush(false /* restart channels */);
6355 if (NO_ERROR != rc) {
6356 LOGE("internal flush to handle mState = ERROR failed");
6357 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006358
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006359 pthread_mutex_lock(&mMutex);
6360 mState = DEINIT;
6361 pthread_mutex_unlock(&mMutex);
6362 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006363
6364 camera3_notify_msg_t notify_msg;
6365 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6366 notify_msg.type = CAMERA3_MSG_ERROR;
6367 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6368 notify_msg.message.error.error_stream = NULL;
6369 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006370 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006371
6372 return rc;
6373}
6374
6375/*===========================================================================
6376 * FUNCTION : captureResultCb
6377 *
6378 * DESCRIPTION: Callback handler for all capture result
6379 * (streams, as well as metadata)
6380 *
6381 * PARAMETERS :
6382 * @metadata : metadata information
6383 * @buffer : actual gralloc buffer to be returned to frameworks.
6384 * NULL if metadata.
6385 *
6386 * RETURN : NONE
6387 *==========================================================================*/
6388void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6389 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6390{
6391 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006392 pthread_mutex_lock(&mMutex);
6393 uint8_t batchSize = mBatchSize;
6394 pthread_mutex_unlock(&mMutex);
6395 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006396 handleBatchMetadata(metadata_buf,
6397 true /* free_and_bufdone_meta_buf */);
6398 } else { /* mBatchSize = 0 */
6399 hdrPlusPerfLock(metadata_buf);
6400 pthread_mutex_lock(&mMutex);
6401 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006402 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006403 true /* last urgent frame of batch metadata */,
6404 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006405 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006406 pthread_mutex_unlock(&mMutex);
6407 }
6408 } else if (isInputBuffer) {
6409 pthread_mutex_lock(&mMutex);
6410 handleInputBufferWithLock(frame_number);
6411 pthread_mutex_unlock(&mMutex);
6412 } else {
6413 pthread_mutex_lock(&mMutex);
6414 handleBufferWithLock(buffer, frame_number);
6415 pthread_mutex_unlock(&mMutex);
6416 }
6417 return;
6418}
6419
6420/*===========================================================================
6421 * FUNCTION : getReprocessibleOutputStreamId
6422 *
6423 * DESCRIPTION: Get source output stream id for the input reprocess stream
6424 * based on size and format, which would be the largest
6425 * output stream if an input stream exists.
6426 *
6427 * PARAMETERS :
6428 * @id : return the stream id if found
6429 *
6430 * RETURN : int32_t type of status
6431 * NO_ERROR -- success
 *              non-zero failure code
6433 *==========================================================================*/
6434int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6435{
    /* Check whether there is any output or bidirectional stream with the same
       size and format as the input stream, and return that stream's id */
6438 if ((mInputStreamInfo.dim.width > 0) &&
6439 (mInputStreamInfo.dim.height > 0)) {
6440 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6441 it != mStreamInfo.end(); it++) {
6442
6443 camera3_stream_t *stream = (*it)->stream;
6444 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6445 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6446 (stream->format == mInputStreamInfo.format)) {
6447 // Usage flag for an input stream and the source output stream
6448 // may be different.
6449 LOGD("Found reprocessible output stream! %p", *it);
6450 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6451 stream->usage, mInputStreamInfo.usage);
6452
6453 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6454 if (channel != NULL && channel->mStreams[0]) {
6455 id = channel->mStreams[0]->getMyServerID();
6456 return NO_ERROR;
6457 }
6458 }
6459 }
6460 } else {
6461 LOGD("No input stream, so no reprocessible output stream");
6462 }
6463 return NAME_NOT_FOUND;
6464}
6465
6466/*===========================================================================
6467 * FUNCTION : lookupFwkName
6468 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
6471 *
6472 * PARAMETERS :
6473 * @arr : map between the two enums
6474 * @len : len of the map
6475 * @hal_name : name of the hal_parm to map
6476 *
6477 * RETURN : int type of status
6478 * fwk_name -- success
 *              non-zero failure code
6480 *==========================================================================*/
6481template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6482 size_t len, halType hal_name)
6483{
6484
6485 for (size_t i = 0; i < len; i++) {
6486 if (arr[i].hal_name == hal_name) {
6487 return arr[i].fwk_name;
6488 }
6489 }
6490
6491 /* Not able to find matching framework type is not necessarily
    /* Not being able to find a matching framework type is not necessarily
     * an error case. This happens when mm-camera supports more attributes
     * than the framework does */
6495 return NAME_NOT_FOUND;
6496}
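// Illustrative usage (map and variable names are assumptions, not taken from
// this excerpt):
//   int fwkMode = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
//   if (fwkMode != NAME_NOT_FOUND) { /* report fwkMode to the framework */ }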
6497
6498/*===========================================================================
6499 * FUNCTION : lookupHalName
6500 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
6503 *
6504 * PARAMETERS :
6505 * @arr : map between the two enums
6506 * @len : len of the map
 *   @fwk_name : name of the fwk_parm to map
6508 *
6509 * RETURN : int32_t type of status
6510 * hal_name -- success
 *              non-zero failure code
6512 *==========================================================================*/
6513template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6514 size_t len, fwkType fwk_name)
6515{
6516 for (size_t i = 0; i < len; i++) {
6517 if (arr[i].fwk_name == fwk_name) {
6518 return arr[i].hal_name;
6519 }
6520 }
6521
6522 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6523 return NAME_NOT_FOUND;
6524}
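// Illustrative usage (map name assumed): the reverse mapping, e.g.
//   int halMode = lookupHalName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), ANDROID_CONTROL_EFFECT_MODE_SEPIA);
// returns the backend enum on success or NAME_NOT_FOUND otherwise.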
6525
6526/*===========================================================================
6527 * FUNCTION : lookupProp
6528 *
6529 * DESCRIPTION: lookup a value by its name
6530 *
6531 * PARAMETERS :
6532 * @arr : map between the two enums
6533 * @len : size of the map
6534 * @name : name to be looked up
6535 *
6536 * RETURN : Value if found
6537 * CAM_CDS_MODE_MAX if not found
6538 *==========================================================================*/
6539template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6540 size_t len, const char *name)
6541{
6542 if (name) {
6543 for (size_t i = 0; i < len; i++) {
6544 if (!strcmp(arr[i].desc, name)) {
6545 return arr[i].val;
6546 }
6547 }
6548 }
6549 return CAM_CDS_MODE_MAX;
6550}
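// Illustrative usage (map/property names assumed): a CDS mode string read via
// property_get() could be mapped with
//   cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
// which yields CAM_CDS_MODE_MAX when the string is not recognized.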
6551
6552/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate the metadata reported by the backend into the
 *              camera_metadata_t format expected by the framework
6555 *
6556 * PARAMETERS :
6557 * @metadata : metadata information from callback
6558 * @timestamp: metadata buffer timestamp
6559 * @request_id: request id
6560 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006561 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006562 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6563 * // DevCamDebug metadata end
 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006565 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6566 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006567 *
6568 * RETURN : camera_metadata_t*
6569 * metadata in a format specified by fwk
6570 *==========================================================================*/
6571camera_metadata_t*
6572QCamera3HardwareInterface::translateFromHalMetadata(
6573 metadata_buffer_t *metadata,
6574 nsecs_t timestamp,
6575 int32_t request_id,
6576 const CameraMetadata& jpegMetadata,
6577 uint8_t pipeline_depth,
6578 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006579 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006580 /* DevCamDebug metadata translateFromHalMetadata argument */
6581 uint8_t DevCamDebug_meta_enable,
6582 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006583 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006584 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006585 bool lastMetadataInBatch,
6586 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006587{
6588 CameraMetadata camMetadata;
6589 camera_metadata_t *resultMetadata;
6590
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006591 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006592 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6593 * Timestamp is needed because it's used for shutter notify calculation.
6594 * */
6595 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6596 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006597 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006598 }
6599
Thierry Strudel3d639192016-09-09 11:52:26 -07006600 if (jpegMetadata.entryCount())
6601 camMetadata.append(jpegMetadata);
6602
6603 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6604 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6605 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6606 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006607 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006608 if (mBatchSize == 0) {
6609 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6610 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6611 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006612
Samuel Ha68ba5172016-12-15 18:41:12 -08006613 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6614 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6615 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6616 // DevCamDebug metadata translateFromHalMetadata AF
6617 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6618 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6619 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6620 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6621 }
6622 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6623 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6624 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6625 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6626 }
6627 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6628 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6629 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6630 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6631 }
6632 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6633 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6634 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6635 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6636 }
6637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6638 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6639 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6640 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6641 }
6642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6643 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6644 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6645 *DevCamDebug_af_monitor_pdaf_target_pos;
6646 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6647 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6648 }
6649 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6650 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6651 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6652 *DevCamDebug_af_monitor_pdaf_confidence;
6653 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6654 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6657 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6658 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6659 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6660 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6661 }
6662 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6663 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6664 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6665 *DevCamDebug_af_monitor_tof_target_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6667 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6670 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6671 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6672 *DevCamDebug_af_monitor_tof_confidence;
6673 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6674 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6677 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6678 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6679 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6680 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6683 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6684 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6686 &fwk_DevCamDebug_af_monitor_type_select, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6689 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6690 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6691 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6692 &fwk_DevCamDebug_af_monitor_refocus, 1);
6693 }
6694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6695 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6696 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6697 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6698 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6701 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6702 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6703 *DevCamDebug_af_search_pdaf_target_pos;
6704 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6705 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6708 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6709 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6710 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6711 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6714 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6717 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6721 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6722 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6723 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6726 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6727 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6729 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6730 }
6731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6732 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6733 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6734 *DevCamDebug_af_search_tof_target_pos;
6735 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6736 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6737 }
6738 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6739 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6740 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6742 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6745 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6746 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6748 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6754 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6757 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6758 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6760 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6763 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6764 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6765 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6766 &fwk_DevCamDebug_af_search_type_select, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6769 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6770 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6772 &fwk_DevCamDebug_af_search_next_pos, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6775 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6776 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6778 &fwk_DevCamDebug_af_search_target_pos, 1);
6779 }
6780 // DevCamDebug metadata translateFromHalMetadata AEC
6781 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6782 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6783 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6784 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6787 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6788 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6789 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6792 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6793 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6794 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6795 }
6796 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6797 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6798 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6799 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6800 }
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6802 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6803 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6804 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6805 }
6806 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6807 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6808 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6809 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6812 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6813 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6814 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6815 }
6816 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6817 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6818 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6819 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6820 }
Samuel Ha34229982017-02-17 13:51:11 -08006821 // DevCamDebug metadata translateFromHalMetadata zzHDR
6822 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6823 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6824 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6825 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6826 }
6827 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6828 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006829 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006830 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6831 }
6832 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6833 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6834 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6835 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6838 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006839 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006840 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6841 }
6842 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6843 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6844 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6845 *DevCamDebug_aec_hdr_sensitivity_ratio;
6846 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6847 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6848 }
6849 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6850 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6851 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6852 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6853 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6854 }
6855 // DevCamDebug metadata translateFromHalMetadata ADRC
6856 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6857 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6858 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6859 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6860 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6861 }
6862 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6863 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6864 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6865 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6866 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6867 }
6868 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6869 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6870 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6871 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6872 }
6873 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6874 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6875 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6876 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6877 }
6878 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6879 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6880 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6881 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6882 }
6883 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6884 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6885 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6886 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6887 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006888 // DevCamDebug metadata translateFromHalMetadata AWB
6889 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6890 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6891 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6892 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6893 }
6894 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6895 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6896 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6897 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6898 }
6899 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6900 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6901 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6902 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6903 }
6904 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6905 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6906 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6907 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6908 }
6909 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6910 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6911 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6912 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6913 }
6914 }
6915 // atrace_end(ATRACE_TAG_ALWAYS);
6916
Thierry Strudel3d639192016-09-09 11:52:26 -07006917 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6918 int64_t fwk_frame_number = *frame_number;
6919 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6920 }
6921
6922 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6923 int32_t fps_range[2];
6924 fps_range[0] = (int32_t)float_range->min_fps;
6925 fps_range[1] = (int32_t)float_range->max_fps;
6926 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6927 fps_range, 2);
6928 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6929 fps_range[0], fps_range[1]);
6930 }
6931
6932 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6933 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6934 }
6935
6936 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6937 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6938 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6939 *sceneMode);
6940 if (NAME_NOT_FOUND != val) {
6941 uint8_t fwkSceneMode = (uint8_t)val;
6942 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6943 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6944 fwkSceneMode);
6945 }
6946 }
6947
6948 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6949 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6950 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6951 }
6952
6953 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6954 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6955 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6956 }
6957
6958 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6959 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6960 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6961 }
6962
6963 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6964 CAM_INTF_META_EDGE_MODE, metadata) {
6965 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6966 }
6967
6968 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6969 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6970 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6971 }
6972
6973 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6974 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6975 }
6976
6977 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6978 if (0 <= *flashState) {
6979 uint8_t fwk_flashState = (uint8_t) *flashState;
6980 if (!gCamCapability[mCameraId]->flash_available) {
6981 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6982 }
6983 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6984 }
6985 }
6986
6987 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6988 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6989 if (NAME_NOT_FOUND != val) {
6990 uint8_t fwk_flashMode = (uint8_t)val;
6991 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6992 }
6993 }
6994
6995 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6996 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6997 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6998 }
6999
7000 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7001 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7002 }
7003
7004 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7005 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7006 }
7007
7008 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7009 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7010 }
7011
7012 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7013 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7014 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7015 }
7016
7017 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7018 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7019 LOGD("fwk_videoStab = %d", fwk_videoStab);
7020 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7021 } else {
7022        // Regardless of whether video stabilization is supported, CTS expects the EIS result
7023        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7024 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7025 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007026 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007027 }
7028
7029 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7030 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7031 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7032 }
7033
7034 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7035 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7036 }
7037
Thierry Strudel3d639192016-09-09 11:52:26 -07007038 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7039 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007040 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
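        // adjustBlackLevelForCFA() is expected to reorder the per-channel black levels into
        // RGGB order based on the sensor's color filter arrangement.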
Thierry Strudel3d639192016-09-09 11:52:26 -07007041
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007042 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7043 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007044
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007045 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007046 blackLevelAppliedPattern->cam_black_level[0],
7047 blackLevelAppliedPattern->cam_black_level[1],
7048 blackLevelAppliedPattern->cam_black_level[2],
7049 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007050 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7051 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007052
7053#ifndef USE_HAL_3_3
7054 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307055        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007056 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307057 fwk_blackLevelInd[0] /= 16.0;
7058 fwk_blackLevelInd[1] /= 16.0;
7059 fwk_blackLevelInd[2] /= 16.0;
7060 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007061 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7062 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007063#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007064 }
7065
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007066#ifndef USE_HAL_3_3
7067 // Fixed whitelevel is used by ISP/Sensor
7068 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7069 &gCamCapability[mCameraId]->white_level, 1);
7070#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007071
7072 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7073 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7074 int32_t scalerCropRegion[4];
7075 scalerCropRegion[0] = hScalerCropRegion->left;
7076 scalerCropRegion[1] = hScalerCropRegion->top;
7077 scalerCropRegion[2] = hScalerCropRegion->width;
7078 scalerCropRegion[3] = hScalerCropRegion->height;
7079
7080 // Adjust crop region from sensor output coordinate system to active
7081 // array coordinate system.
7082 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7083 scalerCropRegion[2], scalerCropRegion[3]);
7084
7085 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7086 }
7087
7088 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7089 LOGD("sensorExpTime = %lld", *sensorExpTime);
7090 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7091 }
7092
7093    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7094            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7095        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7096        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7097 }
7098
7099 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7100 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7101 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7102 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7103 sensorRollingShutterSkew, 1);
7104 }
7105
7106 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7107 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7108 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7109
7110 //calculate the noise profile based on sensitivity
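        // Per the Android noise model, the noise variance at a pixel value x is approximately
        // S * x + O; the same (S, O) pair is reported here for every color channel.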
7111 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7112 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7113 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7114 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7115 noise_profile[i] = noise_profile_S;
7116 noise_profile[i+1] = noise_profile_O;
7117 }
7118 LOGD("noise model entry (S, O) is (%f, %f)",
7119 noise_profile_S, noise_profile_O);
7120 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7121 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7122 }
7123
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007124#ifndef USE_HAL_3_3
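    // POST_RAW_SENSITIVITY_BOOST uses ISO-like units where 100 means no boost; any post-stats
    // gain reported by the ISP is folded in below as a multiplier.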
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007125 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007126 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007127 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007128 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007129 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7130 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7131 }
7132 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007133#endif
7134
Thierry Strudel3d639192016-09-09 11:52:26 -07007135 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7136 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7137 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7138 }
7139
7140 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7141 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7142 *faceDetectMode);
7143 if (NAME_NOT_FOUND != val) {
7144 uint8_t fwk_faceDetectMode = (uint8_t)val;
7145 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7146
7147 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7148 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7149 CAM_INTF_META_FACE_DETECTION, metadata) {
7150 uint8_t numFaces = MIN(
7151 faceDetectionInfo->num_faces_detected, MAX_ROI);
7152 int32_t faceIds[MAX_ROI];
7153 uint8_t faceScores[MAX_ROI];
7154 int32_t faceRectangles[MAX_ROI * 4];
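                    // Six landmark values per face: left eye (x, y), right eye (x, y), mouth (x, y).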
7155 int32_t faceLandmarks[MAX_ROI * 6];
7156 size_t j = 0, k = 0;
7157
7158 for (size_t i = 0; i < numFaces; i++) {
7159 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7160                        // Map the face boundary from the sensor output coordinate system to
7161                        // the active array coordinate system.
7162 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7163 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7164 rect.width, rect.height);
7165
7166 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7167 faceRectangles+j, -1);
7168
Jason Lee8ce36fa2017-04-19 19:40:37 -07007169 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7170 "bottom-right (%d, %d)",
7171 faceDetectionInfo->frame_id, i,
7172 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7173 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7174
Thierry Strudel3d639192016-09-09 11:52:26 -07007175 j+= 4;
7176 }
7177 if (numFaces <= 0) {
7178 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7179 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7180 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7181 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7182 }
7183
7184 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7185 numFaces);
7186 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7187 faceRectangles, numFaces * 4U);
7188 if (fwk_faceDetectMode ==
7189 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7190 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7191 CAM_INTF_META_FACE_LANDMARK, metadata) {
7192
7193 for (size_t i = 0; i < numFaces; i++) {
7194                            // Map the landmark coordinates from the sensor output coordinate
7195                            // system to the active array coordinate system.
7196 mCropRegionMapper.toActiveArray(
7197 landmarks->face_landmarks[i].left_eye_center.x,
7198 landmarks->face_landmarks[i].left_eye_center.y);
7199 mCropRegionMapper.toActiveArray(
7200 landmarks->face_landmarks[i].right_eye_center.x,
7201 landmarks->face_landmarks[i].right_eye_center.y);
7202 mCropRegionMapper.toActiveArray(
7203 landmarks->face_landmarks[i].mouth_center.x,
7204 landmarks->face_landmarks[i].mouth_center.y);
7205
7206 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007207
7208 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7209 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7210 faceDetectionInfo->frame_id, i,
7211 faceLandmarks[k + LEFT_EYE_X],
7212 faceLandmarks[k + LEFT_EYE_Y],
7213 faceLandmarks[k + RIGHT_EYE_X],
7214 faceLandmarks[k + RIGHT_EYE_Y],
7215 faceLandmarks[k + MOUTH_X],
7216 faceLandmarks[k + MOUTH_Y]);
7217
Thierry Strudel04e026f2016-10-10 11:27:36 -07007218 k+= TOTAL_LANDMARK_INDICES;
7219 }
7220 } else {
7221 for (size_t i = 0; i < numFaces; i++) {
7222 setInvalidLandmarks(faceLandmarks+k);
7223 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007224 }
7225 }
7226
Jason Lee49619db2017-04-13 12:07:22 -07007227 for (size_t i = 0; i < numFaces; i++) {
7228 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7229
7230 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7231 faceDetectionInfo->frame_id, i, faceIds[i]);
7232 }
7233
Thierry Strudel3d639192016-09-09 11:52:26 -07007234 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7235 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7236 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007237 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007238 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7239 CAM_INTF_META_FACE_BLINK, metadata) {
7240 uint8_t detected[MAX_ROI];
7241 uint8_t degree[MAX_ROI * 2];
7242 for (size_t i = 0; i < numFaces; i++) {
7243 detected[i] = blinks->blink[i].blink_detected;
7244 degree[2 * i] = blinks->blink[i].left_blink;
7245 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007246
Jason Lee49619db2017-04-13 12:07:22 -07007247 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7248 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7249 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7250 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007251 }
7252 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7253 detected, numFaces);
7254 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7255 degree, numFaces * 2);
7256 }
7257 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7258 CAM_INTF_META_FACE_SMILE, metadata) {
7259 uint8_t degree[MAX_ROI];
7260 uint8_t confidence[MAX_ROI];
7261 for (size_t i = 0; i < numFaces; i++) {
7262 degree[i] = smiles->smile[i].smile_degree;
7263 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007264
Jason Lee49619db2017-04-13 12:07:22 -07007265 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7266 "smile_degree=%d, smile_score=%d",
7267 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007268 }
7269 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7270 degree, numFaces);
7271 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7272 confidence, numFaces);
7273 }
7274 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7275 CAM_INTF_META_FACE_GAZE, metadata) {
7276 int8_t angle[MAX_ROI];
7277 int32_t direction[MAX_ROI * 3];
7278 int8_t degree[MAX_ROI * 2];
7279 for (size_t i = 0; i < numFaces; i++) {
7280 angle[i] = gazes->gaze[i].gaze_angle;
7281 direction[3 * i] = gazes->gaze[i].updown_dir;
7282 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7283 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7284 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7285 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007286
7287 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7288 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7289 "left_right_gaze=%d, top_bottom_gaze=%d",
7290 faceDetectionInfo->frame_id, i, angle[i],
7291 direction[3 * i], direction[3 * i + 1],
7292 direction[3 * i + 2],
7293 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007294 }
7295 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7296 (uint8_t *)angle, numFaces);
7297 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7298 direction, numFaces * 3);
7299 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7300 (uint8_t *)degree, numFaces * 2);
7301 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007302 }
7303 }
7304 }
7305 }
7306
7307 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7308 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007309 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007310 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007311 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007312
Shuzhen Wang14415f52016-11-16 18:26:18 -08007313 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7314 histogramBins = *histBins;
7315 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7316 }
7317
7318 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007319 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7320 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007321 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007322
7323 switch (stats_data->type) {
7324 case CAM_HISTOGRAM_TYPE_BAYER:
7325 switch (stats_data->bayer_stats.data_type) {
7326 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007327 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7328 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007329 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007330 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7331 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007333 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7334 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007335 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007336 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007337 case CAM_STATS_CHANNEL_R:
7338 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007339 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7340 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007341 }
7342 break;
7343 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007344 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007345 break;
7346 }
7347
Shuzhen Wang14415f52016-11-16 18:26:18 -08007348 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 }
7350 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007351 }
7352
7353 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7354 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7355 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7356 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7357 }
7358
7359 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7360 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7361 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7362 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7363 }
7364
7365 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7366 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7367 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7368 CAM_MAX_SHADING_MAP_HEIGHT);
7369 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7370 CAM_MAX_SHADING_MAP_WIDTH);
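        // Each grid point of the shading map carries four gain samples (one per Bayer channel),
        // hence the 4U multiplier below.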
7371 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7372 lensShadingMap->lens_shading, 4U * map_width * map_height);
7373 }
7374
7375 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7376 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7377 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7378 }
7379
7380 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7381 //Populate CAM_INTF_META_TONEMAP_CURVES
7382 /* ch0 = G, ch 1 = B, ch 2 = R*/
7383 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7384 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7385 tonemap->tonemap_points_cnt,
7386 CAM_MAX_TONEMAP_CURVE_SIZE);
7387 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7388 }
7389
7390 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7391 &tonemap->curves[0].tonemap_points[0][0],
7392 tonemap->tonemap_points_cnt * 2);
7393
7394 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7395 &tonemap->curves[1].tonemap_points[0][0],
7396 tonemap->tonemap_points_cnt * 2);
7397
7398 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7399 &tonemap->curves[2].tonemap_points[0][0],
7400 tonemap->tonemap_points_cnt * 2);
7401 }
7402
7403 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7404 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7405 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7406 CC_GAIN_MAX);
7407 }
7408
7409 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7410 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7411 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7412 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7413 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7414 }
7415
7416 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7417 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7418 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7419 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7420 toneCurve->tonemap_points_cnt,
7421 CAM_MAX_TONEMAP_CURVE_SIZE);
7422 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7423 }
7424 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7425 (float*)toneCurve->curve.tonemap_points,
7426 toneCurve->tonemap_points_cnt * 2);
7427 }
7428
7429 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7430 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7431 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7432 predColorCorrectionGains->gains, 4);
7433 }
7434
7435 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7436 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7437 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7438 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7439 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7440 }
7441
7442 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7443 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7444 }
7445
7446 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7447 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7448 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7449 }
7450
7451 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7452 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7453 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7454 }
7455
7456 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7457 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7458 *effectMode);
7459 if (NAME_NOT_FOUND != val) {
7460 uint8_t fwk_effectMode = (uint8_t)val;
7461 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7462 }
7463 }
7464
7465 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7466 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7467 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7468 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7469 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7470 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7471 }
7472 int32_t fwk_testPatternData[4];
7473 fwk_testPatternData[0] = testPatternData->r;
7474 fwk_testPatternData[3] = testPatternData->b;
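        // Indices 1 and 2 hold the two green channels; Gr/Gb are swapped for GBRG/BGGR color
        // filter arrangements so the reported order stays consistent for the framework.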
7475 switch (gCamCapability[mCameraId]->color_arrangement) {
7476 case CAM_FILTER_ARRANGEMENT_RGGB:
7477 case CAM_FILTER_ARRANGEMENT_GRBG:
7478 fwk_testPatternData[1] = testPatternData->gr;
7479 fwk_testPatternData[2] = testPatternData->gb;
7480 break;
7481 case CAM_FILTER_ARRANGEMENT_GBRG:
7482 case CAM_FILTER_ARRANGEMENT_BGGR:
7483 fwk_testPatternData[2] = testPatternData->gr;
7484 fwk_testPatternData[1] = testPatternData->gb;
7485 break;
7486 default:
7487 LOGE("color arrangement %d is not supported",
7488 gCamCapability[mCameraId]->color_arrangement);
7489 break;
7490 }
7491 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7492 }
7493
7494 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7495 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7496 }
7497
7498 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7499 String8 str((const char *)gps_methods);
7500 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7501 }
7502
7503 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7504 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7505 }
7506
7507 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7508 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7509 }
7510
7511 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7512 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7513 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7514 }
7515
7516 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7517 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7518 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7519 }
7520
7521 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7522 int32_t fwk_thumb_size[2];
7523 fwk_thumb_size[0] = thumb_size->width;
7524 fwk_thumb_size[1] = thumb_size->height;
7525 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7526 }
7527
7528 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7529 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7530 privateData,
7531 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7532 }
7533
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007534 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007535 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007536 meteringMode, 1);
7537 }
7538
Thierry Strudel54dc9782017-02-15 12:12:10 -08007539 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7540 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7541 LOGD("hdr_scene_data: %d %f\n",
7542 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7543 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7544 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7545 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7546 &isHdr, 1);
7547 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7548 &isHdrConfidence, 1);
7549 }
7550
7551
7552
Thierry Strudel3d639192016-09-09 11:52:26 -07007553 if (metadata->is_tuning_params_valid) {
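        // The tuning blob is packed as six uint32 headers (data version plus the sensor, VFE,
        // CPP, CAC and mod3 section sizes, with mod3 forced to 0) followed by the sensor, VFE,
        // CPP and CAC payloads, each clamped to its corresponding *_DATA_MAX limit.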
7554 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7555 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7556 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7557
7558
7559 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7560 sizeof(uint32_t));
7561 data += sizeof(uint32_t);
7562
7563 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7564 sizeof(uint32_t));
7565 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7566 data += sizeof(uint32_t);
7567
7568 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7569 sizeof(uint32_t));
7570 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7571 data += sizeof(uint32_t);
7572
7573 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7574 sizeof(uint32_t));
7575 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7576 data += sizeof(uint32_t);
7577
7578 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7579 sizeof(uint32_t));
7580 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7581 data += sizeof(uint32_t);
7582
7583 metadata->tuning_params.tuning_mod3_data_size = 0;
7584 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7585 sizeof(uint32_t));
7586 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7587 data += sizeof(uint32_t);
7588
7589 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7590 TUNING_SENSOR_DATA_MAX);
7591 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7592 count);
7593 data += count;
7594
7595 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7596 TUNING_VFE_DATA_MAX);
7597 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7598 count);
7599 data += count;
7600
7601 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7602 TUNING_CPP_DATA_MAX);
7603 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7604 count);
7605 data += count;
7606
7607 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7608 TUNING_CAC_DATA_MAX);
7609 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7610 count);
7611 data += count;
7612
7613 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7614 (int32_t *)(void *)tuning_meta_data_blob,
7615 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7616 }
7617
7618 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7619 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7620 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7621 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7622 NEUTRAL_COL_POINTS);
7623 }
7624
7625 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7626 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7627 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7628 }
7629
7630 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7631 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7632 // Adjust crop region from sensor output coordinate system to active
7633 // array coordinate system.
7634 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7635 hAeRegions->rect.width, hAeRegions->rect.height);
7636
7637 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7638 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7639 REGIONS_TUPLE_COUNT);
7640 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7641 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7642 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7643 hAeRegions->rect.height);
7644 }
7645
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007646 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7647 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7648 if (NAME_NOT_FOUND != val) {
7649 uint8_t fwkAfMode = (uint8_t)val;
7650 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7651 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7652 } else {
7653 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7654 val);
7655 }
7656 }
7657
Thierry Strudel3d639192016-09-09 11:52:26 -07007658 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7659 uint8_t fwk_afState = (uint8_t) *afState;
7660 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007661 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007662 }
7663
7664 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7665 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7666 }
7667
7668 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7669 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7670 }
7671
7672 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7673 uint8_t fwk_lensState = *lensState;
7674 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7675 }
7676
Thierry Strudel3d639192016-09-09 11:52:26 -07007677
7678 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007679 uint32_t ab_mode = *hal_ab_mode;
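        // The framework only defines a generic AUTO antibanding mode, so fold the 50Hz/60Hz
        // auto variants into AUTO before the lookup.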
7680 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7681 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7682 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7683 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007684 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007685 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007686 if (NAME_NOT_FOUND != val) {
7687 uint8_t fwk_ab_mode = (uint8_t)val;
7688 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7689 }
7690 }
7691
7692 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7693 int val = lookupFwkName(SCENE_MODES_MAP,
7694 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7695 if (NAME_NOT_FOUND != val) {
7696 uint8_t fwkBestshotMode = (uint8_t)val;
7697 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7698 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7699 } else {
7700 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7701 }
7702 }
7703
7704 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7705 uint8_t fwk_mode = (uint8_t) *mode;
7706 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7707 }
7708
7709    /* Constant metadata values to be updated */
7710 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7711 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7712
7713 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7714 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7715
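    // Hot pixel map mode is reported as OFF, so publish an empty hot pixel map (count 0).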
7716 int32_t hotPixelMap[2];
7717 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7718
7719 // CDS
7720 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7721 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7722 }
7723
Thierry Strudel04e026f2016-10-10 11:27:36 -07007724 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
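        // Track HDR on/off transitions in mCurrFeatureState so the profiling log below is
        // emitted only when the state actually toggles.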
7725 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007726 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007727 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7728 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7729 } else {
7730 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7731 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007732
7733 if(fwk_hdr != curr_hdr_state) {
7734 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7735 if(fwk_hdr)
7736 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7737 else
7738 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7739 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007740 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7741 }
7742
Thierry Strudel54dc9782017-02-15 12:12:10 -08007743 //binning correction
7744 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7745 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7746 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7747 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7748 }
7749
Thierry Strudel04e026f2016-10-10 11:27:36 -07007750 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007751 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007752 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7753 int8_t is_ir_on = 0;
7754
7755 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7756 if(is_ir_on != curr_ir_state) {
7757 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7758 if(is_ir_on)
7759 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7760 else
7761 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7762 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007763 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007764 }
7765
Thierry Strudel269c81a2016-10-12 12:13:59 -07007766 // AEC SPEED
7767 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7768 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7769 }
7770
7771 // AWB SPEED
7772 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7773 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7774 }
7775
Thierry Strudel3d639192016-09-09 11:52:26 -07007776 // TNR
7777 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7778 uint8_t tnr_enable = tnr->denoise_enable;
7779 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007780 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7781 int8_t is_tnr_on = 0;
7782
7783 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7784 if(is_tnr_on != curr_tnr_state) {
7785 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7786 if(is_tnr_on)
7787 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7788 else
7789 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007791
7792 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7793 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7794 }
7795
7796 // Reprocess crop data
7797 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7798 uint8_t cnt = crop_data->num_of_streams;
7799 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7800 // mm-qcamera-daemon only posts crop_data for streams
7801            // not linked to pproc, so the absence of valid crop metadata is not
7802 // necessarily an error case.
7803 LOGD("No valid crop metadata entries");
7804 } else {
7805 uint32_t reproc_stream_id;
7806 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7807 LOGD("No reprocessible stream found, ignore crop data");
7808 } else {
7809 int rc = NO_ERROR;
7810 Vector<int32_t> roi_map;
7811 int32_t *crop = new int32_t[cnt*4];
7812 if (NULL == crop) {
7813 rc = NO_MEMORY;
7814 }
7815 if (NO_ERROR == rc) {
7816 int32_t streams_found = 0;
7817 for (size_t i = 0; i < cnt; i++) {
7818 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7819 if (pprocDone) {
7820 // HAL already does internal reprocessing,
7821 // either via reprocessing before JPEG encoding,
7822 // or offline postprocessing for pproc bypass case.
7823 crop[0] = 0;
7824 crop[1] = 0;
7825 crop[2] = mInputStreamInfo.dim.width;
7826 crop[3] = mInputStreamInfo.dim.height;
7827 } else {
7828 crop[0] = crop_data->crop_info[i].crop.left;
7829 crop[1] = crop_data->crop_info[i].crop.top;
7830 crop[2] = crop_data->crop_info[i].crop.width;
7831 crop[3] = crop_data->crop_info[i].crop.height;
7832 }
7833 roi_map.add(crop_data->crop_info[i].roi_map.left);
7834 roi_map.add(crop_data->crop_info[i].roi_map.top);
7835 roi_map.add(crop_data->crop_info[i].roi_map.width);
7836 roi_map.add(crop_data->crop_info[i].roi_map.height);
7837 streams_found++;
7838 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7839 crop[0], crop[1], crop[2], crop[3]);
7840 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7841 crop_data->crop_info[i].roi_map.left,
7842 crop_data->crop_info[i].roi_map.top,
7843 crop_data->crop_info[i].roi_map.width,
7844 crop_data->crop_info[i].roi_map.height);
7845 break;
7846
7847 }
7848 }
7849 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7850 &streams_found, 1);
7851 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7852 crop, (size_t)(streams_found * 4));
7853 if (roi_map.array()) {
7854 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7855 roi_map.array(), roi_map.size());
7856 }
7857 }
7858 if (crop) {
7859 delete [] crop;
7860 }
7861 }
7862 }
7863 }
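    /* Note for readers (illustrative, derived from the update() calls above,
     * not an authoritative vendor-tag definition): a consumer of these tags
     * would read them back roughly as
     *
     *   QCAMERA3_CROP_COUNT_REPROCESS   : int32_t stream count (0 or 1 here)
     *   QCAMERA3_CROP_REPROCESS         : count * 4 int32_t {left, top, width, height}
     *   QCAMERA3_CROP_ROI_MAP_REPROCESS : count * 4 int32_t {left, top, width, height}
     */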
7864
7865 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7866        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7867        // non-NULL, so hardcode the CAC result to OFF mode.
7868 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7869 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7870 } else {
7871 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7872 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7873 *cacMode);
7874 if (NAME_NOT_FOUND != val) {
7875 uint8_t resultCacMode = (uint8_t)val;
7876                // Check whether the CAC result from the callback matches the framework-set
7877                // CAC mode; if not, report the CAC mode from the corresponding request
7878 if (fwk_cacMode != resultCacMode) {
7879 resultCacMode = fwk_cacMode;
7880 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007881 //Check if CAC is disabled by property
7882 if (m_cacModeDisabled) {
7883 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7884 }
7885
Thierry Strudel3d639192016-09-09 11:52:26 -07007886 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7887 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7888 } else {
7889 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7890 }
7891 }
7892 }
7893
7894 // Post blob of cam_cds_data through vendor tag.
7895 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7896 uint8_t cnt = cdsInfo->num_of_streams;
7897 cam_cds_data_t cdsDataOverride;
7898 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7899 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7900 cdsDataOverride.num_of_streams = 1;
7901 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7902 uint32_t reproc_stream_id;
7903 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7904 LOGD("No reprocessible stream found, ignore cds data");
7905 } else {
7906 for (size_t i = 0; i < cnt; i++) {
7907 if (cdsInfo->cds_info[i].stream_id ==
7908 reproc_stream_id) {
7909 cdsDataOverride.cds_info[0].cds_enable =
7910 cdsInfo->cds_info[i].cds_enable;
7911 break;
7912 }
7913 }
7914 }
7915 } else {
7916 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7917 }
7918 camMetadata.update(QCAMERA3_CDS_INFO,
7919 (uint8_t *)&cdsDataOverride,
7920 sizeof(cam_cds_data_t));
7921 }
7922
7923 // Ldaf calibration data
7924 if (!mLdafCalibExist) {
7925 IF_META_AVAILABLE(uint32_t, ldafCalib,
7926 CAM_INTF_META_LDAF_EXIF, metadata) {
7927 mLdafCalibExist = true;
7928 mLdafCalib[0] = ldafCalib[0];
7929 mLdafCalib[1] = ldafCalib[1];
7930 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7931 ldafCalib[0], ldafCalib[1]);
7932 }
7933 }
7934
Thierry Strudel54dc9782017-02-15 12:12:10 -08007935 // EXIF debug data through vendor tag
7936 /*
7937 * Mobicat Mask can assume 3 values:
7938 * 1 refers to Mobicat data,
7939 * 2 refers to Stats Debug and Exif Debug Data
7940 * 3 refers to Mobicat and Stats Debug Data
7941 * We want to make sure that we are sending Exif debug data
7942 * only when Mobicat Mask is 2.
7943 */
7944 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7945 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7946 (uint8_t *)(void *)mExifParams.debug_params,
7947 sizeof(mm_jpeg_debug_exif_params_t));
7948 }
7949
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007950 // Reprocess and DDM debug data through vendor tag
7951 cam_reprocess_info_t repro_info;
7952 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007953 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7954 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007955 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007956 }
7957 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7958 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007959 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007960 }
7961 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7962 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007963 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007964 }
7965 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7966 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007967 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007968 }
7969 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7970 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007971 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007972 }
7973 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007974 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007975 }
7976 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7977 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007978 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007979 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007980 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7981 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7982 }
7983 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7984 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7985 }
7986 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7987 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007988
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007989 // INSTANT AEC MODE
7990 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7991 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7992 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7993 }
7994
Shuzhen Wange763e802016-03-31 10:24:29 -07007995 // AF scene change
7996 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7997 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7998 }
7999
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008000 // Enable ZSL
8001 if (enableZsl != nullptr) {
8002 uint8_t value = *enableZsl ?
8003 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8004 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8005 }
8006
Thierry Strudel3d639192016-09-09 11:52:26 -07008007 resultMetadata = camMetadata.release();
8008 return resultMetadata;
8009}
8010
8011/*===========================================================================
8012 * FUNCTION : saveExifParams
8013 *
8014 * DESCRIPTION: Cache 3A/EXIF debug parameters from the metadata callback
8015 *
8016 * PARAMETERS :
8017 * @metadata : metadata information from callback
8018 *
8019 * RETURN : none
8020 *
8021 *==========================================================================*/
8022void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8023{
8024 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8025 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8026 if (mExifParams.debug_params) {
8027 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8028 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8029 }
8030 }
8031 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8032 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8033 if (mExifParams.debug_params) {
8034 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8035 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8036 }
8037 }
8038 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8039 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8040 if (mExifParams.debug_params) {
8041 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8042 mExifParams.debug_params->af_debug_params_valid = TRUE;
8043 }
8044 }
8045 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8046 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8047 if (mExifParams.debug_params) {
8048 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8049 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8050 }
8051 }
8052 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8053 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8054 if (mExifParams.debug_params) {
8055 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8056 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8057 }
8058 }
8059 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8060 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8061 if (mExifParams.debug_params) {
8062 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8063 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8064 }
8065 }
8066 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8067 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8068 if (mExifParams.debug_params) {
8069 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8070 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8071 }
8072 }
8073 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8074 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8075 if (mExifParams.debug_params) {
8076 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8077 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8078 }
8079 }
8080}
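/* Usage sketch (assumption, for illustration only): the parameters cached here
 * are later exposed through get3AExifParams() and, when the Mobicat mask is 2,
 * posted to the framework in the same way as the check earlier in this file:
 *
 *   if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
 *       camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
 *               (uint8_t *)(void *)mExifParams.debug_params,
 *               sizeof(mm_jpeg_debug_exif_params_t));
 *   }
 *
 * The *_valid flags set above indicate which sub-blocks of the blob carry
 * meaningful data.
 */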
8081
8082/*===========================================================================
8083 * FUNCTION : get3AExifParams
8084 *
8085 * DESCRIPTION: Return the cached 3A EXIF debug parameters
8086 *
8087 * PARAMETERS : none
8088 *
8089 *
8090 * RETURN : mm_jpeg_exif_params_t
8091 *
8092 *==========================================================================*/
8093mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8094{
8095 return mExifParams;
8096}
8097
8098/*===========================================================================
8099 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8100 *
8101 * DESCRIPTION: Translate urgent (partial result) metadata into framework format
8102 *
8103 * PARAMETERS :
8104 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008105 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8106 * urgent metadata in a batch. Always true for
8107 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008108 *
8109 * RETURN : camera_metadata_t*
8110 * metadata in a format specified by fwk
8111 *==========================================================================*/
8112camera_metadata_t*
8113QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008114 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008115{
8116 CameraMetadata camMetadata;
8117 camera_metadata_t *resultMetadata;
8118
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008119 if (!lastUrgentMetadataInBatch) {
8120 /* In batch mode, use empty metadata if this is not the last in batch
8121 */
8122 resultMetadata = allocate_camera_metadata(0, 0);
8123 return resultMetadata;
8124 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008125
8126 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8127 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8128 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8129 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8130 }
8131
8132 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8133 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8134 &aecTrigger->trigger, 1);
8135 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8136 &aecTrigger->trigger_id, 1);
8137 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8138 aecTrigger->trigger);
8139 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8140 aecTrigger->trigger_id);
8141 }
8142
8143 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8144 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8145 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8146 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8147 }
8148
Thierry Strudel3d639192016-09-09 11:52:26 -07008149 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8150 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8151 &af_trigger->trigger, 1);
8152 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8153 af_trigger->trigger);
8154 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8155 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8156 af_trigger->trigger_id);
8157 }
8158
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008159 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8160 /*af regions*/
8161 int32_t afRegions[REGIONS_TUPLE_COUNT];
8162        // Adjust AF region from sensor output coordinate system to active
8163        // array coordinate system.
8164 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8165 hAfRegions->rect.width, hAfRegions->rect.height);
8166
8167 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8168 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8169 REGIONS_TUPLE_COUNT);
8170 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8171 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8172 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8173 hAfRegions->rect.height);
8174 }
8175
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008176 // AF region confidence
8177 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8178 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8179 }
8180
Thierry Strudel3d639192016-09-09 11:52:26 -07008181 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8182 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8183 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8184 if (NAME_NOT_FOUND != val) {
8185 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8186 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8187 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8188 } else {
8189 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8190 }
8191 }
8192
8193 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8194 uint32_t aeMode = CAM_AE_MODE_MAX;
8195 int32_t flashMode = CAM_FLASH_MODE_MAX;
8196 int32_t redeye = -1;
8197 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8198 aeMode = *pAeMode;
8199 }
8200 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8201 flashMode = *pFlashMode;
8202 }
8203 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8204 redeye = *pRedeye;
8205 }
8206
8207 if (1 == redeye) {
8208 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8209 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8210 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8211 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8212 flashMode);
8213 if (NAME_NOT_FOUND != val) {
8214 fwk_aeMode = (uint8_t)val;
8215 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8216 } else {
8217 LOGE("Unsupported flash mode %d", flashMode);
8218 }
8219 } else if (aeMode == CAM_AE_MODE_ON) {
8220 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8221 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8222 } else if (aeMode == CAM_AE_MODE_OFF) {
8223 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8224 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008225 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8226 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8227 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008228 } else {
8229 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8230 "flashMode:%d, aeMode:%u!!!",
8231 redeye, flashMode, aeMode);
8232 }
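    /* Summary of the AE mode deduction above (documentation only, derived from
     * this code path, not from any spec):
     *   redeye == 1                              -> AE_MODE_ON_AUTO_FLASH_REDEYE
     *   flashMode == AUTO or ON                  -> mapped via AE_FLASH_MODE_MAP
     *   aeMode == CAM_AE_MODE_ON                 -> AE_MODE_ON
     *   aeMode == CAM_AE_MODE_OFF                -> AE_MODE_OFF
     *   aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH  -> NEXUS_..._AE_MODE_EXTERNAL_FLASH
     *   anything else                            -> error logged, tag not updated
     */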
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008233 if (mInstantAEC) {
8234        // Increment frame index count until the bound is reached for instant AEC.
8235 mInstantAecFrameIdxCount++;
8236 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8237 CAM_INTF_META_AEC_INFO, metadata) {
8238 LOGH("ae_params->settled = %d",ae_params->settled);
8239 // If AEC settled, or if number of frames reached bound value,
8240 // should reset instant AEC.
8241 if (ae_params->settled ||
8242 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8243 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8244 mInstantAEC = false;
8245 mResetInstantAEC = true;
8246 mInstantAecFrameIdxCount = 0;
8247 }
8248 }
8249 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008250 resultMetadata = camMetadata.release();
8251 return resultMetadata;
8252}
8253
8254/*===========================================================================
8255 * FUNCTION : dumpMetadataToFile
8256 *
8257 * DESCRIPTION: Dumps tuning metadata to file system
8258 *
8259 * PARAMETERS :
8260 * @meta : tuning metadata
8261 * @dumpFrameCount : current dump frame count
8262 * @enabled : Enable mask
8263 *
8264 *==========================================================================*/
8265void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8266 uint32_t &dumpFrameCount,
8267 bool enabled,
8268 const char *type,
8269 uint32_t frameNumber)
8270{
8271 //Some sanity checks
8272 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8273 LOGE("Tuning sensor data size bigger than expected %d: %d",
8274 meta.tuning_sensor_data_size,
8275 TUNING_SENSOR_DATA_MAX);
8276 return;
8277 }
8278
8279 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8280 LOGE("Tuning VFE data size bigger than expected %d: %d",
8281 meta.tuning_vfe_data_size,
8282 TUNING_VFE_DATA_MAX);
8283 return;
8284 }
8285
8286 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8287 LOGE("Tuning CPP data size bigger than expected %d: %d",
8288 meta.tuning_cpp_data_size,
8289 TUNING_CPP_DATA_MAX);
8290 return;
8291 }
8292
8293 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8294 LOGE("Tuning CAC data size bigger than expected %d: %d",
8295 meta.tuning_cac_data_size,
8296 TUNING_CAC_DATA_MAX);
8297 return;
8298 }
8299 //
8300
8301 if(enabled){
8302 char timeBuf[FILENAME_MAX];
8303 char buf[FILENAME_MAX];
8304 memset(buf, 0, sizeof(buf));
8305 memset(timeBuf, 0, sizeof(timeBuf));
8306 time_t current_time;
8307 struct tm * timeinfo;
8308 time (&current_time);
8309 timeinfo = localtime (&current_time);
8310 if (timeinfo != NULL) {
8311 strftime (timeBuf, sizeof(timeBuf),
8312 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8313 }
8314 String8 filePath(timeBuf);
8315 snprintf(buf,
8316 sizeof(buf),
8317 "%dm_%s_%d.bin",
8318 dumpFrameCount,
8319 type,
8320 frameNumber);
8321 filePath.append(buf);
8322 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8323 if (file_fd >= 0) {
8324 ssize_t written_len = 0;
8325 meta.tuning_data_version = TUNING_DATA_VERSION;
8326 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8327 written_len += write(file_fd, data, sizeof(uint32_t));
8328 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8329 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8330 written_len += write(file_fd, data, sizeof(uint32_t));
8331 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8332 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8333 written_len += write(file_fd, data, sizeof(uint32_t));
8334 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8335 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8336 written_len += write(file_fd, data, sizeof(uint32_t));
8337 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8338 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8339 written_len += write(file_fd, data, sizeof(uint32_t));
8340 meta.tuning_mod3_data_size = 0;
8341 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8342 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8343 written_len += write(file_fd, data, sizeof(uint32_t));
8344 size_t total_size = meta.tuning_sensor_data_size;
8345 data = (void *)((uint8_t *)&meta.data);
8346 written_len += write(file_fd, data, total_size);
8347 total_size = meta.tuning_vfe_data_size;
8348 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8349 written_len += write(file_fd, data, total_size);
8350 total_size = meta.tuning_cpp_data_size;
8351 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8352 written_len += write(file_fd, data, total_size);
8353 total_size = meta.tuning_cac_data_size;
8354 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8355 written_len += write(file_fd, data, total_size);
8356 close(file_fd);
8357        } else {
8358            LOGE("failed to open file for metadata dumping");
8359 }
8360 }
8361}
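/* Illustrative note on the dump layout written above (derived from the write
 * sequence, not a formal file-format spec): each .bin file starts with six
 * uint32_t header fields followed by the raw tuning blobs taken from fixed
 * offsets of meta.data:
 *
 *   uint32_t tuning_data_version;
 *   uint32_t tuning_sensor_data_size;   // blob copied from meta.data[0]
 *   uint32_t tuning_vfe_data_size;      // blob from meta.data[TUNING_VFE_DATA_OFFSET]
 *   uint32_t tuning_cpp_data_size;      // blob from meta.data[TUNING_CPP_DATA_OFFSET]
 *   uint32_t tuning_cac_data_size;      // blob from meta.data[TUNING_CAC_DATA_OFFSET]
 *   uint32_t tuning_mod3_data_size;     // always written as 0 here
 */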
8362
8363/*===========================================================================
8364 * FUNCTION : cleanAndSortStreamInfo
8365 *
8366 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8367 * and sort them such that raw stream is at the end of the list
8368 * This is a workaround for a camera daemon constraint.
8369 *
8370 * PARAMETERS : None
8371 *
8372 *==========================================================================*/
8373void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8374{
8375 List<stream_info_t *> newStreamInfo;
8376
8377 /*clean up invalid streams*/
8378 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8379 it != mStreamInfo.end();) {
8380 if(((*it)->status) == INVALID){
8381 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8382 delete channel;
8383 free(*it);
8384 it = mStreamInfo.erase(it);
8385 } else {
8386 it++;
8387 }
8388 }
8389
8390 // Move preview/video/callback/snapshot streams into newList
8391 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8392 it != mStreamInfo.end();) {
8393 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8394 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8395 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8396 newStreamInfo.push_back(*it);
8397 it = mStreamInfo.erase(it);
8398 } else
8399 it++;
8400 }
8401 // Move raw streams into newList
8402 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8403 it != mStreamInfo.end();) {
8404 newStreamInfo.push_back(*it);
8405 it = mStreamInfo.erase(it);
8406 }
8407
8408 mStreamInfo = newStreamInfo;
8409}
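/* Example of the resulting order (illustration only): given configured streams
 * {RAW16, PREVIEW, SNAPSHOT, VIDEO}, mStreamInfo ends up as
 * {PREVIEW, SNAPSHOT, VIDEO, RAW16}; all processed streams come first and every
 * RAW stream (RAW_OPAQUE/RAW10/RAW16) is moved to the tail of the list.
 */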
8410
8411/*===========================================================================
8412 * FUNCTION : extractJpegMetadata
8413 *
8414 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8415 * JPEG metadata is cached in HAL, and return as part of capture
8416 * result when metadata is returned from camera daemon.
8417 *
8418 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8419 * @request: capture request
8420 *
8421 *==========================================================================*/
8422void QCamera3HardwareInterface::extractJpegMetadata(
8423 CameraMetadata& jpegMetadata,
8424 const camera3_capture_request_t *request)
8425{
8426 CameraMetadata frame_settings;
8427 frame_settings = request->settings;
8428
8429 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8430 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8431 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8432 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8433
8434 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8435 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8436 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8437 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8438
8439 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8440 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8441 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8442 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8443
8444 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8445 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8446 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8447 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8448
8449 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8450 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8451 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8452 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8453
8454 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8455 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8456 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8457 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8458
8459 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8460 int32_t thumbnail_size[2];
8461 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8462 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8463 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8464 int32_t orientation =
8465 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008466 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008467 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8468 int32_t temp;
8469 temp = thumbnail_size[0];
8470 thumbnail_size[0] = thumbnail_size[1];
8471 thumbnail_size[1] = temp;
8472 }
8473 }
8474 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8475 thumbnail_size,
8476 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8477 }
8478
8479}
8480
8481/*===========================================================================
8482 * FUNCTION : convertToRegions
8483 *
8484 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8485 *
8486 * PARAMETERS :
8487 * @rect : cam_rect_t struct to convert
8488 * @region : int32_t destination array
8489 * @weight : if we are converting from cam_area_t, weight is valid
8490 * else weight = -1
8491 *
8492 *==========================================================================*/
8493void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8494 int32_t *region, int weight)
8495{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008496 region[FACE_LEFT] = rect.left;
8497 region[FACE_TOP] = rect.top;
8498 region[FACE_RIGHT] = rect.left + rect.width;
8499 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008500 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008501 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008502 }
8503}
8504
8505/*===========================================================================
8506 * FUNCTION : convertFromRegions
8507 *
8508 * DESCRIPTION: helper method to convert from array to cam_rect_t
8509 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8510 *
8511 * PARAMETERS :
8512 *   @roi            : cam_area_t destination struct to fill
8513 *   @frame_settings : capture request settings to read the region from
8514 *   @tag            : metadata tag whose data is laid out as
8515 *                     [x_min, y_min, x_max, y_max, weight]
8516 *==========================================================================*/
8517void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008518 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008519{
Thierry Strudel3d639192016-09-09 11:52:26 -07008520 int32_t x_min = frame_settings.find(tag).data.i32[0];
8521 int32_t y_min = frame_settings.find(tag).data.i32[1];
8522 int32_t x_max = frame_settings.find(tag).data.i32[2];
8523 int32_t y_max = frame_settings.find(tag).data.i32[3];
8524 roi.weight = frame_settings.find(tag).data.i32[4];
8525 roi.rect.left = x_min;
8526 roi.rect.top = y_min;
8527 roi.rect.width = x_max - x_min;
8528 roi.rect.height = y_max - y_min;
8529}
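/* Region tuple convention used by the two helpers above (shown for reference,
 * derived from the code): framework region arrays are laid out as
 * [x_min, y_min, x_max, y_max, weight], while the HAL uses
 * cam_area_t { rect = {left, top, width, height}, weight }. For example, a
 * hypothetical cam_rect_t {left=100, top=200, width=300, height=400} with
 * weight 1 converts to [100, 200, 400, 600, 1], and converting that array back
 * yields the original rectangle.
 */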
8530
8531/*===========================================================================
8532 * FUNCTION : resetIfNeededROI
8533 *
8534 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8535 * crop region
8536 *
8537 * PARAMETERS :
8538 * @roi : cam_area_t struct to resize
8539 * @scalerCropRegion : cam_crop_region_t region to compare against
8540 *
8541 *
8542 *==========================================================================*/
8543bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8544 const cam_crop_region_t* scalerCropRegion)
8545{
8546 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8547 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8548 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8549 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8550
8551    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8552     * Without this early return, the checks below that validate whether the roi
8553     * lies inside the scaler crop region would fail, so the roi would never be
8554     * reset and the algorithm would keep using a stale roi window.
8555     */
8556 if (roi->weight == 0) {
8557 return true;
8558 }
8559
8560    if ((roi_x_max < scalerCropRegion->left) ||
8561        // right edge of roi window is left of scaler crop's left edge
8562        (roi_y_max < scalerCropRegion->top) ||
8563        // bottom edge of roi window is above scaler crop's top edge
8564        (roi->rect.left > crop_x_max) ||
8565        // left edge of roi window is beyond (right of) scaler crop's right edge
8566        (roi->rect.top > crop_y_max)){
8567        // top edge of roi window is below scaler crop's bottom edge
8568 return false;
8569 }
8570 if (roi->rect.left < scalerCropRegion->left) {
8571 roi->rect.left = scalerCropRegion->left;
8572 }
8573 if (roi->rect.top < scalerCropRegion->top) {
8574 roi->rect.top = scalerCropRegion->top;
8575 }
8576 if (roi_x_max > crop_x_max) {
8577 roi_x_max = crop_x_max;
8578 }
8579 if (roi_y_max > crop_y_max) {
8580 roi_y_max = crop_y_max;
8581 }
8582 roi->rect.width = roi_x_max - roi->rect.left;
8583 roi->rect.height = roi_y_max - roi->rect.top;
8584 return true;
8585}
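/* Worked example (hypothetical numbers, illustration only): with a scaler crop
 * region of {left=0, top=0, width=2000, height=1500} and an roi of
 * {left=1800, top=1400, width=400, height=300, weight=1}, the roi overlaps the
 * crop region and is clamped to {left=1800, top=1400, width=200, height=100};
 * the function returns true. An roi with weight == 0 returns true untouched,
 * while an roi that lies entirely outside the crop region returns false.
 */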
8586
8587/*===========================================================================
8588 * FUNCTION : convertLandmarks
8589 *
8590 * DESCRIPTION: helper method to extract the landmarks from face detection info
8591 *
8592 * PARAMETERS :
8593 * @landmark_data : input landmark data to be converted
8594 * @landmarks : int32_t destination array
8595 *
8596 *
8597 *==========================================================================*/
8598void QCamera3HardwareInterface::convertLandmarks(
8599 cam_face_landmarks_info_t landmark_data,
8600 int32_t *landmarks)
8601{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008602 if (landmark_data.is_left_eye_valid) {
8603 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8604 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8605 } else {
8606 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8607 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8608 }
8609
8610 if (landmark_data.is_right_eye_valid) {
8611 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8612 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8613 } else {
8614 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8615 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8616 }
8617
8618 if (landmark_data.is_mouth_valid) {
8619 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8620 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8621 } else {
8622 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8623 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8624 }
8625}
8626
8627/*===========================================================================
8628 * FUNCTION : setInvalidLandmarks
8629 *
8630 * DESCRIPTION: helper method to set invalid landmarks
8631 *
8632 * PARAMETERS :
8633 * @landmarks : int32_t destination array
8634 *
8635 *
8636 *==========================================================================*/
8637void QCamera3HardwareInterface::setInvalidLandmarks(
8638 int32_t *landmarks)
8639{
8640 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8641 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8642 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8643 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8644 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8645 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008646}
8647
8648#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008649
8650/*===========================================================================
8651 * FUNCTION : getCapabilities
8652 *
8653 * DESCRIPTION: query camera capability from back-end
8654 *
8655 * PARAMETERS :
8656 * @ops : mm-interface ops structure
8657 * @cam_handle : camera handle for which we need capability
8658 *
8659 * RETURN : ptr type of capability structure
8660 * capability for success
8661 * NULL for failure
8662 *==========================================================================*/
8663cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8664 uint32_t cam_handle)
8665{
8666 int rc = NO_ERROR;
8667 QCamera3HeapMemory *capabilityHeap = NULL;
8668 cam_capability_t *cap_ptr = NULL;
8669
8670 if (ops == NULL) {
8671 LOGE("Invalid arguments");
8672 return NULL;
8673 }
8674
8675 capabilityHeap = new QCamera3HeapMemory(1);
8676 if (capabilityHeap == NULL) {
8677 LOGE("creation of capabilityHeap failed");
8678 return NULL;
8679 }
8680
8681 /* Allocate memory for capability buffer */
8682 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8683 if(rc != OK) {
8684        LOGE("No memory for capability");
8685 goto allocate_failed;
8686 }
8687
8688 /* Map memory for capability buffer */
8689 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8690
8691 rc = ops->map_buf(cam_handle,
8692 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8693 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8694 if(rc < 0) {
8695 LOGE("failed to map capability buffer");
8696 rc = FAILED_TRANSACTION;
8697 goto map_failed;
8698 }
8699
8700 /* Query Capability */
8701 rc = ops->query_capability(cam_handle);
8702 if(rc < 0) {
8703 LOGE("failed to query capability");
8704 rc = FAILED_TRANSACTION;
8705 goto query_failed;
8706 }
8707
8708 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8709 if (cap_ptr == NULL) {
8710 LOGE("out of memory");
8711 rc = NO_MEMORY;
8712 goto query_failed;
8713 }
8714
8715 memset(cap_ptr, 0, sizeof(cam_capability_t));
8716 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8717
8718 int index;
8719 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8720 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8721 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8722 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8723 }
8724
8725query_failed:
8726 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8727map_failed:
8728 capabilityHeap->deallocate();
8729allocate_failed:
8730 delete capabilityHeap;
8731
8732 if (rc != NO_ERROR) {
8733 return NULL;
8734 } else {
8735 return cap_ptr;
8736 }
8737}
8738
Thierry Strudel3d639192016-09-09 11:52:26 -07008739/*===========================================================================
8740 * FUNCTION : initCapabilities
8741 *
8742 * DESCRIPTION: initialize camera capabilities in static data struct
8743 *
8744 * PARAMETERS :
8745 * @cameraId : camera Id
8746 *
8747 * RETURN : int32_t type of status
8748 * NO_ERROR -- success
8749 * none-zero failure code
8750 *==========================================================================*/
8751int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8752{
8753 int rc = 0;
8754 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008755 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008756
8757 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8758 if (rc) {
8759 LOGE("camera_open failed. rc = %d", rc);
8760 goto open_failed;
8761 }
8762 if (!cameraHandle) {
8763 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8764 goto open_failed;
8765 }
8766
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008767 handle = get_main_camera_handle(cameraHandle->camera_handle);
8768 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8769 if (gCamCapability[cameraId] == NULL) {
8770 rc = FAILED_TRANSACTION;
8771 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008772 }
8773
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008774 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008775 if (is_dual_camera_by_idx(cameraId)) {
8776 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8777 gCamCapability[cameraId]->aux_cam_cap =
8778 getCapabilities(cameraHandle->ops, handle);
8779 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8780 rc = FAILED_TRANSACTION;
8781 free(gCamCapability[cameraId]);
8782 goto failed_op;
8783 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008784
8785 // Copy the main camera capability to main_cam_cap struct
8786 gCamCapability[cameraId]->main_cam_cap =
8787 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8788 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8789 LOGE("out of memory");
8790 rc = NO_MEMORY;
8791 goto failed_op;
8792 }
8793 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8794 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008796failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008797 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8798 cameraHandle = NULL;
8799open_failed:
8800 return rc;
8801}
8802
8803/*==========================================================================
8804 * FUNCTION : get3Aversion
8805 *
8806 * DESCRIPTION: get the Q3A S/W version
8807 *
8808 * PARAMETERS :
8809 * @sw_version: Reference of Q3A structure which will hold version info upon
8810 * return
8811 *
8812 * RETURN : None
8813 *
8814 *==========================================================================*/
8815void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8816{
8817 if(gCamCapability[mCameraId])
8818 sw_version = gCamCapability[mCameraId]->q3a_version;
8819 else
8820 LOGE("Capability structure NULL!");
8821}
8822
8823
8824/*===========================================================================
8825 * FUNCTION : initParameters
8826 *
8827 * DESCRIPTION: initialize camera parameters
8828 *
8829 * PARAMETERS :
8830 *
8831 * RETURN : int32_t type of status
8832 * NO_ERROR -- success
8833 * none-zero failure code
8834 *==========================================================================*/
8835int QCamera3HardwareInterface::initParameters()
8836{
8837 int rc = 0;
8838
8839 //Allocate Set Param Buffer
8840 mParamHeap = new QCamera3HeapMemory(1);
8841 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8842 if(rc != OK) {
8843 rc = NO_MEMORY;
8844 LOGE("Failed to allocate SETPARM Heap memory");
8845 delete mParamHeap;
8846 mParamHeap = NULL;
8847 return rc;
8848 }
8849
8850 //Map memory for parameters buffer
8851 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8852 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8853 mParamHeap->getFd(0),
8854 sizeof(metadata_buffer_t),
8855 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8856 if(rc < 0) {
8857 LOGE("failed to map SETPARM buffer");
8858 rc = FAILED_TRANSACTION;
8859 mParamHeap->deallocate();
8860 delete mParamHeap;
8861 mParamHeap = NULL;
8862 return rc;
8863 }
8864
8865 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8866
8867 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8868 return rc;
8869}
8870
8871/*===========================================================================
8872 * FUNCTION : deinitParameters
8873 *
8874 * DESCRIPTION: de-initialize camera parameters
8875 *
8876 * PARAMETERS :
8877 *
8878 * RETURN : NONE
8879 *==========================================================================*/
8880void QCamera3HardwareInterface::deinitParameters()
8881{
8882 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8883 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8884
8885 mParamHeap->deallocate();
8886 delete mParamHeap;
8887 mParamHeap = NULL;
8888
8889 mParameters = NULL;
8890
8891 free(mPrevParameters);
8892 mPrevParameters = NULL;
8893}
8894
8895/*===========================================================================
8896 * FUNCTION : calcMaxJpegSize
8897 *
8898 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8899 *
8900 * PARAMETERS :
8901 *
8902 * RETURN : max_jpeg_size
8903 *==========================================================================*/
8904size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8905{
8906 size_t max_jpeg_size = 0;
8907 size_t temp_width, temp_height;
8908 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8909 MAX_SIZES_CNT);
8910 for (size_t i = 0; i < count; i++) {
8911 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8912 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8913 if (temp_width * temp_height > max_jpeg_size ) {
8914 max_jpeg_size = temp_width * temp_height;
8915 }
8916 }
8917 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8918 return max_jpeg_size;
8919}
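/* Worked example (hypothetical sensor, illustration only): if the largest
 * picture size is 4000x3000, then
 *   max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *                 = 18000000 bytes + the trailing blob descriptor,
 * i.e. the buffer is sized for a worst-case JPEG of 1.5 bytes per pixel plus
 * the camera3_jpeg_blob_t header appended at the end of the buffer.
 */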
8920
8921/*===========================================================================
8922 * FUNCTION : getMaxRawSize
8923 *
8924 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8925 *
8926 * PARAMETERS :
8927 *
8928 * RETURN : Largest supported Raw Dimension
8929 *==========================================================================*/
8930cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8931{
8932 int max_width = 0;
8933 cam_dimension_t maxRawSize;
8934
8935 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8936 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8937 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8938 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8939 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8940 }
8941 }
8942 return maxRawSize;
8943}
8944
8945
8946/*===========================================================================
8947 * FUNCTION : calcMaxJpegDim
8948 *
8949 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8950 *
8951 * PARAMETERS :
8952 *
8953 * RETURN : max_jpeg_dim
8954 *==========================================================================*/
8955cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8956{
8957 cam_dimension_t max_jpeg_dim;
8958 cam_dimension_t curr_jpeg_dim;
8959 max_jpeg_dim.width = 0;
8960 max_jpeg_dim.height = 0;
8961 curr_jpeg_dim.width = 0;
8962 curr_jpeg_dim.height = 0;
8963 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8964 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8965 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8966 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8967 max_jpeg_dim.width * max_jpeg_dim.height ) {
8968 max_jpeg_dim.width = curr_jpeg_dim.width;
8969 max_jpeg_dim.height = curr_jpeg_dim.height;
8970 }
8971 }
8972 return max_jpeg_dim;
8973}
8974
8975/*===========================================================================
8976 * FUNCTION : addStreamConfig
8977 *
8978 * DESCRIPTION: adds the stream configuration to the array
8979 *
8980 * PARAMETERS :
8981 * @available_stream_configs : pointer to stream configuration array
8982 * @scalar_format : scalar format
8983 * @dim : configuration dimension
8984 * @config_type : input or output configuration type
8985 *
8986 * RETURN : NONE
8987 *==========================================================================*/
8988void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8989 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8990{
8991 available_stream_configs.add(scalar_format);
8992 available_stream_configs.add(dim.width);
8993 available_stream_configs.add(dim.height);
8994 available_stream_configs.add(config_type);
8995}
8996
8997/*===========================================================================
8998 * FUNCTION   : supportBurstCapture
8999 *
9000 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9001 *
9002 * PARAMETERS :
9003 * @cameraId : camera Id
9004 *
9005 * RETURN : true if camera supports BURST_CAPTURE
9006 * false otherwise
9007 *==========================================================================*/
9008bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9009{
9010 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9011 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9012 const int32_t highResWidth = 3264;
9013 const int32_t highResHeight = 2448;
9014
9015 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9016 // Maximum resolution images cannot be captured at >= 10fps
9017 // -> not supporting BURST_CAPTURE
9018 return false;
9019 }
9020
9021 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9022 // Maximum resolution images can be captured at >= 20fps
9023 // --> supporting BURST_CAPTURE
9024 return true;
9025 }
9026
9027 // Find the smallest highRes resolution, or largest resolution if there is none
9028 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9029 MAX_SIZES_CNT);
9030 size_t highRes = 0;
9031 while ((highRes + 1 < totalCnt) &&
9032 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9033 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9034 highResWidth * highResHeight)) {
9035 highRes++;
9036 }
9037 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9038 return true;
9039 } else {
9040 return false;
9041 }
9042}
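/* Decision summary for supportBurstCapture() (documentation only, derived from
 * the code above, assuming picture_sizes_tbl is sorted from largest to
 * smallest):
 *   - min duration of the largest size > 100 ms   -> no BURST_CAPTURE
 *   - min duration of the largest size <= 50 ms   -> BURST_CAPTURE
 *   - otherwise, take the smallest size still >= 3264x2448 (~8 MP) and
 *     require its min duration to be <= 50 ms.
 */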
9043
9044/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009045 * FUNCTION : getPDStatIndex
9046 *
9047 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9048 *
9049 * PARAMETERS :
9050 * @caps : camera capabilities
9051 *
9052 * RETURN : int32_t type
9053 * non-negative - on success
9054 * -1 - on failure
9055 *==========================================================================*/
9056int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9057 if (nullptr == caps) {
9058 return -1;
9059 }
9060
9061 uint32_t metaRawCount = caps->meta_raw_channel_count;
9062 int32_t ret = -1;
9063 for (size_t i = 0; i < metaRawCount; i++) {
9064 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9065 ret = i;
9066 break;
9067 }
9068 }
9069
9070 return ret;
9071}
9072
9073/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009074 * FUNCTION : initStaticMetadata
9075 *
9076 * DESCRIPTION: initialize the static metadata
9077 *
9078 * PARAMETERS :
9079 * @cameraId : camera Id
9080 *
9081 * RETURN : int32_t type of status
9082 * 0 -- success
9083 * non-zero failure code
9084 *==========================================================================*/
9085int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9086{
9087 int rc = 0;
9088 CameraMetadata staticInfo;
9089 size_t count = 0;
9090 bool limitedDevice = false;
9091 char prop[PROPERTY_VALUE_MAX];
9092 bool supportBurst = false;
9093
9094 supportBurst = supportBurstCapture(cameraId);
9095
9096 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9097 * guaranteed or if min fps of max resolution is less than 20 fps, it is
9098 * advertised as a limited device */
9099 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9100 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9101 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9102 !supportBurst;
9103
9104 uint8_t supportedHwLvl = limitedDevice ?
9105 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009106#ifndef USE_HAL_3_3
9107 // LEVEL_3 - This device will support level 3.
9108 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9109#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009110 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009111#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009112
9113 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9114 &supportedHwLvl, 1);
9115
9116 bool facingBack = false;
9117 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9118 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9119 facingBack = true;
9120 }
9121 /*HAL 3 only*/
9122 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9123 &gCamCapability[cameraId]->min_focus_distance, 1);
9124
9125 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9126 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9127
9128    /* Should be using focal lengths, but the sensor doesn't provide that info now */
9129 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9130 &gCamCapability[cameraId]->focal_length,
9131 1);
9132
9133 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9134 gCamCapability[cameraId]->apertures,
9135 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9136
9137 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9138 gCamCapability[cameraId]->filter_densities,
9139 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9140
9141
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009142 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9143 size_t mode_count =
9144 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9145 for (size_t i = 0; i < mode_count; i++) {
9146 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9147 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009148 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009149 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009150
9151 int32_t lens_shading_map_size[] = {
9152 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9153 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9154 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9155 lens_shading_map_size,
9156 sizeof(lens_shading_map_size)/sizeof(int32_t));
9157
9158 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9159 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9160
9161 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9162 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9163
9164 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9165 &gCamCapability[cameraId]->max_frame_duration, 1);
9166
9167 camera_metadata_rational baseGainFactor = {
9168 gCamCapability[cameraId]->base_gain_factor.numerator,
9169 gCamCapability[cameraId]->base_gain_factor.denominator};
9170 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9171 &baseGainFactor, 1);
9172
9173 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9174 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9175
9176 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9177 gCamCapability[cameraId]->pixel_array_size.height};
9178 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9179 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9180
9181 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9182 gCamCapability[cameraId]->active_array_size.top,
9183 gCamCapability[cameraId]->active_array_size.width,
9184 gCamCapability[cameraId]->active_array_size.height};
9185 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9186 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9187
9188 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9189 &gCamCapability[cameraId]->white_level, 1);
9190
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009191 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9192 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9193 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009194 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009195 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009196
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009197#ifndef USE_HAL_3_3
9198 bool hasBlackRegions = false;
9199 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9200 LOGW("black_region_count: %d is bounded to %d",
9201 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9202 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9203 }
9204 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9205 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9206 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9207 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9208 }
9209 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9210 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9211 hasBlackRegions = true;
9212 }
9213#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009214 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9215 &gCamCapability[cameraId]->flash_charge_duration, 1);
9216
9217 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9218 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9219
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009220 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9221 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9222 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009223 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9224 &timestampSource, 1);
9225
Thierry Strudel54dc9782017-02-15 12:12:10 -08009226 //update histogram vendor data
9227 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009228 &gCamCapability[cameraId]->histogram_size, 1);
9229
Thierry Strudel54dc9782017-02-15 12:12:10 -08009230 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 &gCamCapability[cameraId]->max_histogram_count, 1);
9232
Shuzhen Wang14415f52016-11-16 18:26:18 -08009233 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9234    //so that the app can request fewer bins than the maximum supported.
9235 std::vector<int32_t> histBins;
9236 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9237 histBins.push_back(maxHistBins);
9238 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9239 (maxHistBins & 0x1) == 0) {
9240 histBins.push_back(maxHistBins >> 1);
9241 maxHistBins >>= 1;
9242 }
9243 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9244 histBins.data(), histBins.size());
9245
9246 int32_t sharpness_map_size[] = {
9247 gCamCapability[cameraId]->sharpness_map_size.width,
9248 gCamCapability[cameraId]->sharpness_map_size.height};
9249
9250 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9251 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9252
9253 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9254 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9255
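    // PD (phase-detection) stats are advertised through the depth tags below. The maximum
    // sample count is derived as (width * height * 2) / 16; e.g. a hypothetical 1440x896 PD
    // buffer yields 161280 samples, exposed as a depthSamplesCount x 1 BLOB configuration.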
9256 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9257 if (0 <= indexPD) {
9258 // Advertise PD stats data as part of the Depth capabilities
9259 int32_t depthWidth =
9260 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9261 int32_t depthHeight =
9262 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9263 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9264 assert(0 < depthSamplesCount);
9265 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9266 &depthSamplesCount, 1);
9267
9268 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9269 depthHeight,
9270 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9271 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9272 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9273 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9274 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9275
9276 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9277 depthHeight, 33333333,
9278 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9279 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9280 depthMinDuration,
9281 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9282
9283 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9284 depthHeight, 0,
9285 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9286 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9287 depthStallDuration,
9288 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9289
9290 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9291 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9292 }
9293
9294 int32_t scalar_formats[] = {
9295 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9296 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9297 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9298 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9299 HAL_PIXEL_FORMAT_RAW10,
9300 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9301 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9302 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9303 scalar_formats_count);
9304
9305 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9306 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9307 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9308 count, MAX_SIZES_CNT, available_processed_sizes);
9309 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9310 available_processed_sizes, count * 2);
9311
9312 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9313 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9314 makeTable(gCamCapability[cameraId]->raw_dim,
9315 count, MAX_SIZES_CNT, available_raw_sizes);
9316 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9317 available_raw_sizes, count * 2);
9318
9319 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9320 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9321 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9322 count, MAX_SIZES_CNT, available_fps_ranges);
9323 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9324 available_fps_ranges, count * 2);
9325
9326 camera_metadata_rational exposureCompensationStep = {
9327 gCamCapability[cameraId]->exp_compensation_step.numerator,
9328 gCamCapability[cameraId]->exp_compensation_step.denominator};
9329 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9330 &exposureCompensationStep, 1);
9331
9332 Vector<uint8_t> availableVstabModes;
9333 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9334 char eis_prop[PROPERTY_VALUE_MAX];
9335 bool eisSupported = false;
9336 memset(eis_prop, 0, sizeof(eis_prop));
9337 property_get("persist.camera.eis.enable", eis_prop, "1");
9338 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9339 count = IS_TYPE_MAX;
9340 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9341 for (size_t i = 0; i < count; i++) {
9342 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9343 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9344 eisSupported = true;
9345 break;
9346 }
9347 }
9348 if (facingBack && eis_prop_set && eisSupported) {
9349         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9350 }
9351 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9352 availableVstabModes.array(), availableVstabModes.size());
9353
9354 /*HAL 1 and HAL 3 common*/
9355 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9356 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9357 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9358 // Cap the max zoom to the max preferred value
9359 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
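    // Note: maxZoomStep / minZoomStep is integer division, so any fractional part of the
    // zoom ratio is truncated before the MIN() cap; e.g. a (hypothetical) table maximum of
    // 799 yields 7, not 7.99.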
9360 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9361 &maxZoom, 1);
9362
9363 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9364 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9365
9366 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9367 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9368 max3aRegions[2] = 0; /* AF not supported */
9369 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9370 max3aRegions, 3);
9371
9372 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9373 memset(prop, 0, sizeof(prop));
9374 property_get("persist.camera.facedetect", prop, "1");
9375 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9376 LOGD("Support face detection mode: %d",
9377 supportedFaceDetectMode);
9378
9379 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9380 /* supported mode should be OFF if the max number of faces is 0 */
9381 if (maxFaces <= 0) {
9382 supportedFaceDetectMode = 0;
9383 }
9384 Vector<uint8_t> availableFaceDetectModes;
9385 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9386 if (supportedFaceDetectMode == 1) {
9387 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9388 } else if (supportedFaceDetectMode == 2) {
9389 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9390 } else if (supportedFaceDetectMode == 3) {
9391 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9392 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9393 } else {
9394 maxFaces = 0;
9395 }
9396 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9397 availableFaceDetectModes.array(),
9398 availableFaceDetectModes.size());
9399 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9400 (int32_t *)&maxFaces, 1);
9401 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9402 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9403 &face_bsgc, 1);
9404
9405 int32_t exposureCompensationRange[] = {
9406 gCamCapability[cameraId]->exposure_compensation_min,
9407 gCamCapability[cameraId]->exposure_compensation_max};
9408 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9409 exposureCompensationRange,
9410 sizeof(exposureCompensationRange)/sizeof(int32_t));
9411
9412 uint8_t lensFacing = (facingBack) ?
9413 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9414 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9415
9416 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9417 available_thumbnail_sizes,
9418 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9419
9420 /*all sizes will be combined into this tag*/
9421 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9422 /*android.scaler.availableStreamConfigurations*/
9423 Vector<int32_t> available_stream_configs;
9424 cam_dimension_t active_array_dim;
9425 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9426 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9427
9428 /*Advertise the list of supported input dimensions based on the property below.
9429 By default all sizes up to 5MP will be advertised.
9430 Note that the setprop resolution format should be WxH,
9431 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9432 To list all supported sizes, the property needs to be set to "0x0" */
9433 cam_dimension_t minInputSize = {2592,1944}; //5MP
9434 memset(prop, 0, sizeof(prop));
9435 property_get("persist.camera.input.minsize", prop, "2592x1944");
9436 if (strlen(prop) > 0) {
9437 char *saveptr = NULL;
9438 char *token = strtok_r(prop, "x", &saveptr);
9439 if (token != NULL) {
9440 minInputSize.width = atoi(token);
9441 }
9442 token = strtok_r(NULL, "x", &saveptr);
9443 if (token != NULL) {
9444 minInputSize.height = atoi(token);
9445 }
9446 }
9447
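    // Each ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entry is a
    // (format, width, height, input/output direction) tuple.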
9448 /* Add input/output stream configurations for each scalar format */
9449 for (size_t j = 0; j < scalar_formats_count; j++) {
9450 switch (scalar_formats[j]) {
9451 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9452 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9453 case HAL_PIXEL_FORMAT_RAW10:
9454 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9455 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9456 addStreamConfig(available_stream_configs, scalar_formats[j],
9457 gCamCapability[cameraId]->raw_dim[i],
9458 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9459 }
9460 break;
9461 case HAL_PIXEL_FORMAT_BLOB:
9462 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9463 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9464 addStreamConfig(available_stream_configs, scalar_formats[j],
9465 gCamCapability[cameraId]->picture_sizes_tbl[i],
9466 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9467 }
9468 break;
9469 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9470 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9471 default:
9472 cam_dimension_t largest_picture_size;
9473 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9474 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9475 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9476 addStreamConfig(available_stream_configs, scalar_formats[j],
9477 gCamCapability[cameraId]->picture_sizes_tbl[i],
9478 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9479             /*For the two formats below we also support input streams for reprocessing; advertise those*/
9480 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9481 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9482 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9483 >= minInputSize.width) || (gCamCapability[cameraId]->
9484 picture_sizes_tbl[i].height >= minInputSize.height)) {
9485 addStreamConfig(available_stream_configs, scalar_formats[j],
9486 gCamCapability[cameraId]->picture_sizes_tbl[i],
9487 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9488 }
9489 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009490 }
9491
9492         break;
9493 }
9494 }
9495
9496 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9497 available_stream_configs.array(), available_stream_configs.size());
9498 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9499 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9500
9501 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9502 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9503
9504 /* android.scaler.availableMinFrameDurations */
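    // Entries are (format, width, height, minFrameDuration in ns) 4-tuples.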
9505 Vector<int64_t> available_min_durations;
9506 for (size_t j = 0; j < scalar_formats_count; j++) {
9507 switch (scalar_formats[j]) {
9508 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9509 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9510 case HAL_PIXEL_FORMAT_RAW10:
9511 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9512 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9513 available_min_durations.add(scalar_formats[j]);
9514 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9515 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9516 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9517 }
9518 break;
9519 default:
9520 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9521 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9522 available_min_durations.add(scalar_formats[j]);
9523 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9524 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9525 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9526 }
9527 break;
9528 }
9529 }
9530 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9531 available_min_durations.array(), available_min_durations.size());
9532
9533 Vector<int32_t> available_hfr_configs;
9534 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9535 int32_t fps = 0;
9536 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9537 case CAM_HFR_MODE_60FPS:
9538 fps = 60;
9539 break;
9540 case CAM_HFR_MODE_90FPS:
9541 fps = 90;
9542 break;
9543 case CAM_HFR_MODE_120FPS:
9544 fps = 120;
9545 break;
9546 case CAM_HFR_MODE_150FPS:
9547 fps = 150;
9548 break;
9549 case CAM_HFR_MODE_180FPS:
9550 fps = 180;
9551 break;
9552 case CAM_HFR_MODE_210FPS:
9553 fps = 210;
9554 break;
9555 case CAM_HFR_MODE_240FPS:
9556 fps = 240;
9557 break;
9558 case CAM_HFR_MODE_480FPS:
9559 fps = 480;
9560 break;
9561 case CAM_HFR_MODE_OFF:
9562 case CAM_HFR_MODE_MAX:
9563 default:
9564 break;
9565 }
9566
9567 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9568 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9569 /* For each HFR frame rate, need to advertise one variable fps range
9570 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9571 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9572 * set by the app. When video recording is started, [120, 120] is
9573 * set. This way sensor configuration does not change when recording
9574 * is started */
9575
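            // Example: for 240 fps this adds (w, h, PREVIEW_FPS_FOR_HFR, 240, batch) and
            // (w, h, 240, 240, batch); with a preview fps of 30 (as in the example above)
            // the batch size is 240 / 30 = 8.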
9576 /* (width, height, fps_min, fps_max, batch_size_max) */
9577 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9578 j < MAX_SIZES_CNT; j++) {
9579 available_hfr_configs.add(
9580 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9581 available_hfr_configs.add(
9582 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9583 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9584 available_hfr_configs.add(fps);
9585 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9586
9587 /* (width, height, fps_min, fps_max, batch_size_max) */
9588 available_hfr_configs.add(
9589 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9590 available_hfr_configs.add(
9591 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9592 available_hfr_configs.add(fps);
9593 available_hfr_configs.add(fps);
9594 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9595 }
9596 }
9597 }
9598 //Advertise HFR capability only if the property is set
9599 memset(prop, 0, sizeof(prop));
9600 property_get("persist.camera.hal3hfr.enable", prop, "1");
9601 uint8_t hfrEnable = (uint8_t)atoi(prop);
9602
9603 if(hfrEnable && available_hfr_configs.array()) {
9604 staticInfo.update(
9605 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9606 available_hfr_configs.array(), available_hfr_configs.size());
9607 }
9608
9609 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9610 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9611 &max_jpeg_size, 1);
9612
9613 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9614 size_t size = 0;
9615 count = CAM_EFFECT_MODE_MAX;
9616 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9617 for (size_t i = 0; i < count; i++) {
9618 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9619 gCamCapability[cameraId]->supported_effects[i]);
9620 if (NAME_NOT_FOUND != val) {
9621 avail_effects[size] = (uint8_t)val;
9622 size++;
9623 }
9624 }
9625 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9626 avail_effects,
9627 size);
9628
9629 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9630 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9631 size_t supported_scene_modes_cnt = 0;
9632 count = CAM_SCENE_MODE_MAX;
9633 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9634 for (size_t i = 0; i < count; i++) {
9635 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9636 CAM_SCENE_MODE_OFF) {
9637 int val = lookupFwkName(SCENE_MODES_MAP,
9638 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9639 gCamCapability[cameraId]->supported_scene_modes[i]);
9640
9641             if (NAME_NOT_FOUND != val) {
9642 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9643 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9644 supported_scene_modes_cnt++;
9645 }
9646 }
9647 }
9648 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9649 avail_scene_modes,
9650 supported_scene_modes_cnt);
9651
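    // ANDROID_CONTROL_SCENE_MODE_OVERRIDES packs one (AE, AWB, AF) mode triplet per
    // advertised scene mode, hence the * 3 sizing below.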
9652 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9653 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9654 supported_scene_modes_cnt,
9655 CAM_SCENE_MODE_MAX,
9656 scene_mode_overrides,
9657 supported_indexes,
9658 cameraId);
9659
9660 if (supported_scene_modes_cnt == 0) {
9661 supported_scene_modes_cnt = 1;
9662 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9663 }
9664
9665 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9666 scene_mode_overrides, supported_scene_modes_cnt * 3);
9667
9668 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9669 ANDROID_CONTROL_MODE_AUTO,
9670 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9671 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9672 available_control_modes,
9673 3);
9674
9675 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9676 size = 0;
9677 count = CAM_ANTIBANDING_MODE_MAX;
9678 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9679 for (size_t i = 0; i < count; i++) {
9680 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9681 gCamCapability[cameraId]->supported_antibandings[i]);
9682 if (NAME_NOT_FOUND != val) {
9683 avail_antibanding_modes[size] = (uint8_t)val;
9684 size++;
9685 }
9686
9687 }
9688 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9689 avail_antibanding_modes,
9690 size);
9691
9692 uint8_t avail_abberation_modes[] = {
9693 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9694 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9695 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9696 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9697 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9698 if (0 == count) {
9699 // If no aberration correction modes are available for a device, advertise only the OFF mode
9700 size = 1;
9701 } else {
9702 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9703 // so advertise all 3 modes if at least one mode is supported, as per the
9704 // new M requirement
9705 size = 3;
9706 }
9707 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9708 avail_abberation_modes,
9709 size);
9710
9711 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9712 size = 0;
9713 count = CAM_FOCUS_MODE_MAX;
9714 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9715 for (size_t i = 0; i < count; i++) {
9716 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9717 gCamCapability[cameraId]->supported_focus_modes[i]);
9718 if (NAME_NOT_FOUND != val) {
9719 avail_af_modes[size] = (uint8_t)val;
9720 size++;
9721 }
9722 }
9723 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9724 avail_af_modes,
9725 size);
9726
9727 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9728 size = 0;
9729 count = CAM_WB_MODE_MAX;
9730 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9731 for (size_t i = 0; i < count; i++) {
9732 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9733 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9734 gCamCapability[cameraId]->supported_white_balances[i]);
9735 if (NAME_NOT_FOUND != val) {
9736 avail_awb_modes[size] = (uint8_t)val;
9737 size++;
9738 }
9739 }
9740 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9741 avail_awb_modes,
9742 size);
9743
9744 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9745 count = CAM_FLASH_FIRING_LEVEL_MAX;
9746 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9747 count);
9748 for (size_t i = 0; i < count; i++) {
9749 available_flash_levels[i] =
9750 gCamCapability[cameraId]->supported_firing_levels[i];
9751 }
9752 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9753 available_flash_levels, count);
9754
9755 uint8_t flashAvailable;
9756 if (gCamCapability[cameraId]->flash_available)
9757 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9758 else
9759 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9760 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9761 &flashAvailable, 1);
9762
9763 Vector<uint8_t> avail_ae_modes;
9764 count = CAM_AE_MODE_MAX;
9765 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9766 for (size_t i = 0; i < count; i++) {
9767         uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9768 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9769 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9770 }
9771 avail_ae_modes.add(aeMode);
9772     }
9773 if (flashAvailable) {
9774 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9775 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9776 }
9777 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9778 avail_ae_modes.array(),
9779 avail_ae_modes.size());
9780
9781 int32_t sensitivity_range[2];
9782 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9783 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9784 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9785 sensitivity_range,
9786 sizeof(sensitivity_range) / sizeof(int32_t));
9787
9788 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9789 &gCamCapability[cameraId]->max_analog_sensitivity,
9790 1);
9791
9792 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9793 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9794 &sensor_orientation,
9795 1);
9796
9797 int32_t max_output_streams[] = {
9798 MAX_STALLING_STREAMS,
9799 MAX_PROCESSED_STREAMS,
9800 MAX_RAW_STREAMS};
9801 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9802 max_output_streams,
9803 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9804
9805 uint8_t avail_leds = 0;
9806 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9807 &avail_leds, 0);
9808
9809 uint8_t focus_dist_calibrated;
9810 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9811 gCamCapability[cameraId]->focus_dist_calibrated);
9812 if (NAME_NOT_FOUND != val) {
9813 focus_dist_calibrated = (uint8_t)val;
9814 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9815 &focus_dist_calibrated, 1);
9816 }
9817
9818 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9819 size = 0;
9820 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9821 MAX_TEST_PATTERN_CNT);
9822 for (size_t i = 0; i < count; i++) {
9823 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9824 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9825 if (NAME_NOT_FOUND != testpatternMode) {
9826 avail_testpattern_modes[size] = testpatternMode;
9827 size++;
9828 }
9829 }
9830 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9831 avail_testpattern_modes,
9832 size);
9833
9834 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9835 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9836 &max_pipeline_depth,
9837 1);
9838
9839 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9840 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9841 &partial_result_count,
9842 1);
9843
9844 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9845 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9846
9847 Vector<uint8_t> available_capabilities;
9848 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9849 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9850 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9851 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9852 if (supportBurst) {
9853 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9854 }
9855 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9856 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9857 if (hfrEnable && available_hfr_configs.array()) {
9858 available_capabilities.add(
9859 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9860 }
9861
9862 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9863 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9864 }
9865 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9866 available_capabilities.array(),
9867 available_capabilities.size());
9868
9869 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9870 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9871 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9872 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9873
9874 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9875 &aeLockAvailable, 1);
9876
9877 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9878 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9879 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9880 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9881
9882 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9883 &awbLockAvailable, 1);
9884
9885 int32_t max_input_streams = 1;
9886 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9887 &max_input_streams,
9888 1);
9889
9890 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9891 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9892 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9893 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9894 HAL_PIXEL_FORMAT_YCbCr_420_888};
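    // i.e. IMPLEMENTATION_DEFINED and YCbCr_420_888 inputs can each be reprocessed to
    // BLOB (JPEG) or YCbCr_420_888 outputs.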
9895 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9896 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9897
9898 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9899 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9900 &max_latency,
9901 1);
9902
9903#ifndef USE_HAL_3_3
9904 int32_t isp_sensitivity_range[2];
9905 isp_sensitivity_range[0] =
9906 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9907 isp_sensitivity_range[1] =
9908 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9909 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9910 isp_sensitivity_range,
9911 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9912#endif
9913
9914 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9915 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9916 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9917 available_hot_pixel_modes,
9918 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9919
9920 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9921 ANDROID_SHADING_MODE_FAST,
9922 ANDROID_SHADING_MODE_HIGH_QUALITY};
9923 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9924 available_shading_modes,
9925 3);
9926
9927 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9928 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9929 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9930 available_lens_shading_map_modes,
9931 2);
9932
9933 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9934 ANDROID_EDGE_MODE_FAST,
9935 ANDROID_EDGE_MODE_HIGH_QUALITY,
9936 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9937 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9938 available_edge_modes,
9939 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9940
9941 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9942 ANDROID_NOISE_REDUCTION_MODE_FAST,
9943 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9944 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9945 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9946 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9947 available_noise_red_modes,
9948 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9949
9950 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9951 ANDROID_TONEMAP_MODE_FAST,
9952 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9953 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9954 available_tonemap_modes,
9955 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9956
9957 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9958 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9959 available_hot_pixel_map_modes,
9960 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9961
9962 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9963 gCamCapability[cameraId]->reference_illuminant1);
9964 if (NAME_NOT_FOUND != val) {
9965 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9966 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9967 }
9968
9969 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9970 gCamCapability[cameraId]->reference_illuminant2);
9971 if (NAME_NOT_FOUND != val) {
9972 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9973 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9974 }
9975
9976 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9977 (void *)gCamCapability[cameraId]->forward_matrix1,
9978 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9979
9980 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9981 (void *)gCamCapability[cameraId]->forward_matrix2,
9982 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9983
9984 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9985 (void *)gCamCapability[cameraId]->color_transform1,
9986 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9987
9988 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9989 (void *)gCamCapability[cameraId]->color_transform2,
9990 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9991
9992 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9993 (void *)gCamCapability[cameraId]->calibration_transform1,
9994 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9995
9996 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9997 (void *)gCamCapability[cameraId]->calibration_transform2,
9998 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9999
10000 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10001 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10002 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10003 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10004 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10005 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10006 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10007 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10008 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10009 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10010 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10011 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10012 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10013 ANDROID_JPEG_GPS_COORDINATES,
10014 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10015 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10016 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10017 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10018 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10019 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10020 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10021 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10022 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10023 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
10024#ifndef USE_HAL_3_3
10025 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10026#endif
10027         ANDROID_STATISTICS_FACE_DETECT_MODE,
10028         ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10029         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10030 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10031         ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10032         /* DevCamDebug metadata request_keys_basic */
10033 DEVCAMDEBUG_META_ENABLE,
10034 /* DevCamDebug metadata end */
10035         NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10036         NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10037         TANGO_MODE_DATA_SENSOR_FULLFOV,
10038         NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
10039         };
10040
10041 size_t request_keys_cnt =
10042 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10043 Vector<int32_t> available_request_keys;
10044 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10045 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10046 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10047 }
10048
10049     if (gExposeEnableZslKey) {
10050         available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10051     }
10052
10053 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10054 available_request_keys.array(), available_request_keys.size());
10055
10056 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10057 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10058 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10059 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10060 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10061 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10062 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10063 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10064 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10065 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10066 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10067 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10068 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10069 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10070 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10071 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10072 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10073         ANDROID_STATISTICS_FACE_DETECT_MODE,
10074         ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10075 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10076 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
10077         ANDROID_STATISTICS_FACE_SCORES,
10078#ifndef USE_HAL_3_3
10079 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10080#endif
10081         NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10082         NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
10083         // DevCamDebug metadata result_keys_basic
10084 DEVCAMDEBUG_META_ENABLE,
10085 // DevCamDebug metadata result_keys AF
10086 DEVCAMDEBUG_AF_LENS_POSITION,
10087 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10088 DEVCAMDEBUG_AF_TOF_DISTANCE,
10089 DEVCAMDEBUG_AF_LUMA,
10090 DEVCAMDEBUG_AF_HAF_STATE,
10091 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10092 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10093 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10094 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10095 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10096 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10097 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10098 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10099 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10100 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10101 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10102 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10103 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10104 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10105 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10106 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10107 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10108 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10109 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10110 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10111 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10112 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10113 // DevCamDebug metadata result_keys AEC
10114 DEVCAMDEBUG_AEC_TARGET_LUMA,
10115 DEVCAMDEBUG_AEC_COMP_LUMA,
10116 DEVCAMDEBUG_AEC_AVG_LUMA,
10117 DEVCAMDEBUG_AEC_CUR_LUMA,
10118 DEVCAMDEBUG_AEC_LINECOUNT,
10119 DEVCAMDEBUG_AEC_REAL_GAIN,
10120 DEVCAMDEBUG_AEC_EXP_INDEX,
10121 DEVCAMDEBUG_AEC_LUX_IDX,
10122         // DevCamDebug metadata result_keys zzHDR
10123 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10124 DEVCAMDEBUG_AEC_L_LINECOUNT,
10125 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10126 DEVCAMDEBUG_AEC_S_LINECOUNT,
10127 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10128 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10129 // DevCamDebug metadata result_keys ADRC
10130 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10131 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10132 DEVCAMDEBUG_AEC_GTM_RATIO,
10133 DEVCAMDEBUG_AEC_LTM_RATIO,
10134 DEVCAMDEBUG_AEC_LA_RATIO,
10135 DEVCAMDEBUG_AEC_GAMMA_RATIO,
10136         // DevCamDebug metadata result_keys AWB
10137 DEVCAMDEBUG_AWB_R_GAIN,
10138 DEVCAMDEBUG_AWB_G_GAIN,
10139 DEVCAMDEBUG_AWB_B_GAIN,
10140 DEVCAMDEBUG_AWB_CCT,
10141 DEVCAMDEBUG_AWB_DECISION,
10142 /* DevCamDebug metadata end */
10143         NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10144 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10145 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
10146         NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
10147         };
10148
10149 size_t result_keys_cnt =
10150 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10151
10152 Vector<int32_t> available_result_keys;
10153 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10154 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10155 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10156 }
10157 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10158 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10159 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10160 }
10161 if (supportedFaceDetectMode == 1) {
10162 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10163 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10164 } else if ((supportedFaceDetectMode == 2) ||
10165 (supportedFaceDetectMode == 3)) {
10166 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10167 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10168 }
10169#ifndef USE_HAL_3_3
10170 if (hasBlackRegions) {
10171 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10172 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10173 }
10174#endif
10175
10176 if (gExposeEnableZslKey) {
10177 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10178 }
10179
10180 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10181 available_result_keys.array(), available_result_keys.size());
10182
10183 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10184         ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10185 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10186 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10187 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10188 ANDROID_SCALER_CROPPING_TYPE,
10189 ANDROID_SYNC_MAX_LATENCY,
10190 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10191 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10192 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10193 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10194 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10195 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10196 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10197 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10198 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10199 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10200 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10201 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10202 ANDROID_LENS_FACING,
10203 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10204 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10205 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10206 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10207 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10208 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10209 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10210 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10211 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10212 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10213 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10214 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10215 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10216 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10217 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10218 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10219 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10220 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10221 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10222 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
10223         ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
10224         ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10225 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10226 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10227 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10228 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10229 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10230 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10231 ANDROID_CONTROL_AVAILABLE_MODES,
10232 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10233 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10234 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10235 ANDROID_SHADING_AVAILABLE_MODES,
10236         ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10237#ifndef USE_HAL_3_3
10238 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10239 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10240#endif
10241 };
10242
10243 Vector<int32_t> available_characteristics_keys;
10244 available_characteristics_keys.appendArray(characteristics_keys_basic,
10245 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10246#ifndef USE_HAL_3_3
10247 if (hasBlackRegions) {
10248 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10249 }
10250#endif
10251
10252 if (0 <= indexPD) {
10253 int32_t depthKeys[] = {
10254 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10255 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10256 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10257 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10258 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10259 };
10260 available_characteristics_keys.appendArray(depthKeys,
10261 sizeof(depthKeys) / sizeof(depthKeys[0]));
10262 }
10263
10264 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
10265             available_characteristics_keys.array(),
10266 available_characteristics_keys.size());
10267
10268 /*available stall durations depend on the hw + sw and will be different for different devices */
10269 /*have to add for raw after implementation*/
10270 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10271 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10272
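    // Entries are (format, width, height, stallDuration in ns) 4-tuples; JPEG uses the
    // per-size jpeg_stall_durations table and RAW16 uses raw16_stall_durations.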
10273 Vector<int64_t> available_stall_durations;
10274 for (uint32_t j = 0; j < stall_formats_count; j++) {
10275 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10276 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10277 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10278 available_stall_durations.add(stall_formats[j]);
10279 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10280 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10281 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10282 }
10283 } else {
10284 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10285 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10286 available_stall_durations.add(stall_formats[j]);
10287 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10288 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10289 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10290 }
10291 }
10292 }
10293 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10294 available_stall_durations.array(),
10295 available_stall_durations.size());
10296
10297 //QCAMERA3_OPAQUE_RAW
10298 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10299 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10300 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10301 case LEGACY_RAW:
10302 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10303 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10304 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10305 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10306 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10307 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10308 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10309 break;
10310 case MIPI_RAW:
10311 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10312 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10313 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10314 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10315 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10316 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10317 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10318 break;
10319 default:
10320 LOGE("unknown opaque_raw_format %d",
10321 gCamCapability[cameraId]->opaque_raw_fmt);
10322 break;
10323 }
10324 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10325
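    // QCAMERA3_OPAQUE_RAW_STRIDES holds (width, height, stride) triplets; the stride comes
    // from mm_stream_calc_offset_raw() for the opaque RAW format selected above.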
10326 Vector<int32_t> strides;
10327 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10328 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10329 cam_stream_buf_plane_info_t buf_planes;
10330 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10331 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10332 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10333 &gCamCapability[cameraId]->padding_info, &buf_planes);
10334 strides.add(buf_planes.plane_info.mp[0].stride);
10335 }
10336 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10337 strides.size());
10338
10339 //TBD: remove the following line once backend advertises zzHDR in feature mask
10340 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
10341 //Video HDR default
10342 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10343 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
10344             CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
10345         int32_t vhdr_mode[] = {
10346 QCAMERA3_VIDEO_HDR_MODE_OFF,
10347 QCAMERA3_VIDEO_HDR_MODE_ON};
10348
10349 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10350 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10351 vhdr_mode, vhdr_mode_count);
10352 }
10353
10354 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10355 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10356 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10357
10358 uint8_t isMonoOnly =
10359 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10360 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10361 &isMonoOnly, 1);
10362
10363#ifndef USE_HAL_3_3
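    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is populated with (width, height, frame length in
    // bytes) triplets for each supported RAW_OPAQUE dimension, using the calculated plane
    // layout rather than a fixed bytes-per-pixel estimate.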
10364 Vector<int32_t> opaque_size;
10365 for (size_t j = 0; j < scalar_formats_count; j++) {
10366 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10367 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10368 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10369 cam_stream_buf_plane_info_t buf_planes;
10370
10371 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10372 &gCamCapability[cameraId]->padding_info, &buf_planes);
10373
10374 if (rc == 0) {
10375 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10376 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10377 opaque_size.add(buf_planes.plane_info.frame_len);
10378                 } else {
10379 LOGE("raw frame calculation failed!");
10380 }
10381 }
10382 }
10383 }
10384
10385 if ((opaque_size.size() > 0) &&
10386 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10387 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10388 else
10389 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10390#endif
10391
10392 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10393 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10394 size = 0;
10395 count = CAM_IR_MODE_MAX;
10396 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10397 for (size_t i = 0; i < count; i++) {
10398 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10399 gCamCapability[cameraId]->supported_ir_modes[i]);
10400 if (NAME_NOT_FOUND != val) {
10401 avail_ir_modes[size] = (int32_t)val;
10402 size++;
10403 }
10404 }
10405 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10406 avail_ir_modes, size);
10407 }
10408
10409 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10410 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10411 size = 0;
10412 count = CAM_AEC_CONVERGENCE_MAX;
10413 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10414 for (size_t i = 0; i < count; i++) {
10415 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10416 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10417 if (NAME_NOT_FOUND != val) {
10418 available_instant_aec_modes[size] = (int32_t)val;
10419 size++;
10420 }
10421 }
10422 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10423 available_instant_aec_modes, size);
10424 }
10425
10426 int32_t sharpness_range[] = {
10427 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10428 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10429 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10430
10431 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10432 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10433 size = 0;
10434 count = CAM_BINNING_CORRECTION_MODE_MAX;
10435 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10436 for (size_t i = 0; i < count; i++) {
10437 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10438 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10439 gCamCapability[cameraId]->supported_binning_modes[i]);
10440 if (NAME_NOT_FOUND != val) {
10441 avail_binning_modes[size] = (int32_t)val;
10442 size++;
10443 }
10444 }
10445 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10446 avail_binning_modes, size);
10447 }
10448
10449 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10450 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10451 size = 0;
10452 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10453 for (size_t i = 0; i < count; i++) {
10454 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10455 gCamCapability[cameraId]->supported_aec_modes[i]);
10456 if (NAME_NOT_FOUND != val)
10457 available_aec_modes[size++] = val;
10458 }
10459 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10460 available_aec_modes, size);
10461 }
10462
10463 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10464 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10465 size = 0;
10466 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10467 for (size_t i = 0; i < count; i++) {
10468 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10469 gCamCapability[cameraId]->supported_iso_modes[i]);
10470 if (NAME_NOT_FOUND != val)
10471 available_iso_modes[size++] = val;
10472 }
10473 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10474 available_iso_modes, size);
10475 }
10476
10477 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010478 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010479 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10480 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10481 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10482
10483 int32_t available_saturation_range[4];
10484 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10485 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10486 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10487 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10488 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10489 available_saturation_range, 4);
10490
10491 uint8_t is_hdr_values[2];
10492 is_hdr_values[0] = 0;
10493 is_hdr_values[1] = 1;
10494 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10495 is_hdr_values, 2);
10496
10497 float is_hdr_confidence_range[2];
10498 is_hdr_confidence_range[0] = 0.0;
10499 is_hdr_confidence_range[1] = 1.0;
10500 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10501 is_hdr_confidence_range, 2);
10502
Emilian Peev0a972ef2017-03-16 10:25:53 +000010503 size_t eepromLength = strnlen(
10504 reinterpret_cast<const char *>(
10505 gCamCapability[cameraId]->eeprom_version_info),
10506 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10507 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010508 char easelInfo[] = ",E:N";
10509 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
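        // If there is room, append an Easel presence marker (",E:Y" or ",E:N") so it is
        // visible through NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO.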
10510 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10511 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010512 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10513 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010514 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010515 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10516 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10517 }
10518
Thierry Strudel3d639192016-09-09 11:52:26 -070010519 gStaticMetadata[cameraId] = staticInfo.release();
10520 return rc;
10521}
10522
10523/*===========================================================================
10524 * FUNCTION : makeTable
10525 *
10526 * DESCRIPTION: make a table of sizes
10527 *
10528 * PARAMETERS :
10529 *
10530 *
10531 *==========================================================================*/
10532void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10533 size_t max_size, int32_t *sizeTable)
10534{
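    // Flattens dimTable into interleaved {width, height} pairs; sizeTable is assumed to
    // have room for up to 2 * max_size entries.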
10535 size_t j = 0;
10536 if (size > max_size) {
10537 size = max_size;
10538 }
10539 for (size_t i = 0; i < size; i++) {
10540 sizeTable[j] = dimTable[i].width;
10541 sizeTable[j+1] = dimTable[i].height;
10542 j+=2;
10543 }
10544}
10545
10546/*===========================================================================
10547 * FUNCTION : makeFPSTable
10548 *
10549 * DESCRIPTION: make a table of fps ranges
10550 *
10551 * PARAMETERS :
10552 *
10553 *==========================================================================*/
10554void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10555 size_t max_size, int32_t *fpsRangesTable)
10556{
10557 size_t j = 0;
10558 if (size > max_size) {
10559 size = max_size;
10560 }
10561 for (size_t i = 0; i < size; i++) {
10562 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10563 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10564 j+=2;
10565 }
10566}
10567
10568/*===========================================================================
10569 * FUNCTION : makeOverridesList
10570 *
10571 * DESCRIPTION: make a list of scene mode overrides
10572 *
10573 * PARAMETERS :
10574 *
10575 *
10576 *==========================================================================*/
10577void QCamera3HardwareInterface::makeOverridesList(
10578 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10579 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10580{
10581    /* The daemon provides a list of overrides for all scene modes.
10582      However, we should send the framework only the overrides for the
10583      scene modes it supports. */
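    // Each entry written below is a 3-tuple (aeMode, awbMode, afMode), matching the
    // layout expected by ANDROID_CONTROL_SCENE_MODE_OVERRIDES.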
10584 size_t j = 0;
10585 if (size > max_size) {
10586 size = max_size;
10587 }
10588 size_t focus_count = CAM_FOCUS_MODE_MAX;
10589 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10590 focus_count);
10591 for (size_t i = 0; i < size; i++) {
10592 bool supt = false;
10593 size_t index = supported_indexes[i];
10594 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10595 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10596 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10597 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10598 overridesTable[index].awb_mode);
10599 if (NAME_NOT_FOUND != val) {
10600 overridesList[j+1] = (uint8_t)val;
10601 }
10602 uint8_t focus_override = overridesTable[index].af_mode;
10603 for (size_t k = 0; k < focus_count; k++) {
10604 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10605 supt = true;
10606 break;
10607 }
10608 }
10609 if (supt) {
10610 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10611 focus_override);
10612 if (NAME_NOT_FOUND != val) {
10613 overridesList[j+2] = (uint8_t)val;
10614 }
10615 } else {
10616 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10617 }
10618 j+=3;
10619 }
10620}
10621
10622/*===========================================================================
10623 * FUNCTION : filterJpegSizes
10624 *
10625 * DESCRIPTION: Returns the supported JPEG sizes: the processed sizes that the
10626 *              active array size can be downscaled to within the given factor
10627 *
10628 * PARAMETERS :
10629 *
10630 * RETURN : length of jpegSizes array
10631 *==========================================================================*/
10632
10633size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10634 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10635 uint8_t downscale_factor)
10636{
10637 if (0 == downscale_factor) {
10638 downscale_factor = 1;
10639 }
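    // Processed sizes smaller than active_array / downscale_factor are rejected below,
    // presumably because producing them would require more downscaling than the
    // JPEG path supports.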
10640
10641 int32_t min_width = active_array_size.width / downscale_factor;
10642 int32_t min_height = active_array_size.height / downscale_factor;
10643 size_t jpegSizesCnt = 0;
10644 if (processedSizesCnt > maxCount) {
10645 processedSizesCnt = maxCount;
10646 }
10647 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10648 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10649 jpegSizes[jpegSizesCnt] = processedSizes[i];
10650 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10651 jpegSizesCnt += 2;
10652 }
10653 }
10654 return jpegSizesCnt;
10655}
10656
10657/*===========================================================================
10658 * FUNCTION : computeNoiseModelEntryS
10659 *
10660 * DESCRIPTION: function to map a given sensitivity to the S noise
10661 * model parameters in the DNG noise model.
10662 *
10663 * PARAMETERS : sens : the sensor sensitivity
10664 *
10665 * RETURN     : S (sensor amplification) noise
10666 *
10667 *==========================================================================*/
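// S and O below are the per-channel coefficients reported in ANDROID_SENSOR_NOISE_PROFILE;
// in that model the noise standard deviation at normalized signal level x is
// approximately sqrt(S * x + O).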
10668double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10669 double s = gCamCapability[mCameraId]->gradient_S * sens +
10670 gCamCapability[mCameraId]->offset_S;
10671 return ((s < 0.0) ? 0.0 : s);
10672}
10673
10674/*===========================================================================
10675 * FUNCTION : computeNoiseModelEntryO
10676 *
10677 * DESCRIPTION: function to map a given sensitivity to the O noise
10678 * model parameters in the DNG noise model.
10679 *
10680 * PARAMETERS : sens : the sensor sensitivity
10681 *
10682 * RETURN     : O (sensor readout) noise
10683 *
10684 *==========================================================================*/
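// The O (read noise) term grows with the square of the overall gain: once the requested
// sensitivity exceeds the maximum analog sensitivity, the remainder is presumably applied
// as digital gain, hence the digital_gain^2 scaling of offset_O below.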
10685double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10686 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10687 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10688 1.0 : (1.0 * sens / max_analog_sens);
10689 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10690 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10691 return ((o < 0.0) ? 0.0 : o);
10692}
10693
10694/*===========================================================================
10695 * FUNCTION : getSensorSensitivity
10696 *
10697 * DESCRIPTION: convert iso_mode to an integer value
10698 *
10699 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10700 *
10701 * RETURN     : sensitivity supported by sensor
10702 *
10703 *==========================================================================*/
10704int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10705{
10706 int32_t sensitivity;
10707
10708 switch (iso_mode) {
10709 case CAM_ISO_MODE_100:
10710 sensitivity = 100;
10711 break;
10712 case CAM_ISO_MODE_200:
10713 sensitivity = 200;
10714 break;
10715 case CAM_ISO_MODE_400:
10716 sensitivity = 400;
10717 break;
10718 case CAM_ISO_MODE_800:
10719 sensitivity = 800;
10720 break;
10721 case CAM_ISO_MODE_1600:
10722 sensitivity = 1600;
10723 break;
10724 default:
10725 sensitivity = -1;
10726 break;
10727 }
10728 return sensitivity;
10729}
10730
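// Initializes HDR+ related state. If Easel is present (and the
// camera.hdrplus.donotpoweroneasel property is not set), this opens the Easel manager
// client, immediately suspends Easel to save power, and caches the HDR+ properties
// (bypass-only mode, profiling, and whether the enableZsl key is exposed).
// The caller is expected to hold gHdrPlusClientLock, as the "Locked" suffix suggests.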
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010731int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010732 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010733 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10734 // to connect to Easel.
10735 bool doNotpowerOnEasel =
10736 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10737
10738 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010739 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10740 return OK;
10741 }
10742
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010743 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010744 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010745 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010746 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010747 return res;
10748 }
10749
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010750 EaselManagerClientOpened = true;
10751
10752 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010753 if (res != OK) {
10754 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10755 }
10756
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010757 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010758 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010759
10760 // Expose enableZsl key only when HDR+ mode is enabled.
10761 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010762 }
10763
10764 return OK;
10765}
10766
Thierry Strudel3d639192016-09-09 11:52:26 -070010767/*===========================================================================
10768 * FUNCTION : getCamInfo
10769 *
10770 * DESCRIPTION: query camera capabilities
10771 *
10772 * PARAMETERS :
10773 * @cameraId : camera Id
10774 * @info : camera info struct to be filled in with camera capabilities
10775 *
10776 * RETURN : int type of status
10777 * NO_ERROR -- success
10778 *              non-zero failure code
10779 *==========================================================================*/
10780int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10781 struct camera_info *info)
10782{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010783 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010784 int rc = 0;
10785
10786 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010787
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010788 {
10789 Mutex::Autolock l(gHdrPlusClientLock);
10790 rc = initHdrPlusClientLocked();
10791 if (rc != OK) {
10792 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10793 pthread_mutex_unlock(&gCamLock);
10794 return rc;
10795 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010796 }
10797
Thierry Strudel3d639192016-09-09 11:52:26 -070010798 if (NULL == gCamCapability[cameraId]) {
10799 rc = initCapabilities(cameraId);
10800 if (rc < 0) {
10801 pthread_mutex_unlock(&gCamLock);
10802 return rc;
10803 }
10804 }
10805
10806 if (NULL == gStaticMetadata[cameraId]) {
10807 rc = initStaticMetadata(cameraId);
10808 if (rc < 0) {
10809 pthread_mutex_unlock(&gCamLock);
10810 return rc;
10811 }
10812 }
10813
10814 switch(gCamCapability[cameraId]->position) {
10815 case CAM_POSITION_BACK:
10816 case CAM_POSITION_BACK_AUX:
10817 info->facing = CAMERA_FACING_BACK;
10818 break;
10819
10820 case CAM_POSITION_FRONT:
10821 case CAM_POSITION_FRONT_AUX:
10822 info->facing = CAMERA_FACING_FRONT;
10823 break;
10824
10825 default:
10826 LOGE("Unknown position type %d for camera id:%d",
10827 gCamCapability[cameraId]->position, cameraId);
10828 rc = -1;
10829 break;
10830 }
10831
10832
10833 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010834#ifndef USE_HAL_3_3
10835 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10836#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010837 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010838#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010839 info->static_camera_characteristics = gStaticMetadata[cameraId];
10840
10841 //For now assume both cameras can operate independently.
10842 info->conflicting_devices = NULL;
10843 info->conflicting_devices_length = 0;
10844
10845    //resource cost is 100 * MIN(1.0, m/M),
10846    //where m is the throughput requirement with the maximum stream configuration
10847    //and M is the CPP maximum throughput.
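    // In the code below, m is approximated as MAX_PROCESSED_STREAMS * (active array
    // width * height) * the highest advertised max_fps, and M is the reported
    // max_pixel_bandwidth capability.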
10848 float max_fps = 0.0;
10849 for (uint32_t i = 0;
10850 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10851 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10852 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10853 }
10854 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10855 gCamCapability[cameraId]->active_array_size.width *
10856 gCamCapability[cameraId]->active_array_size.height * max_fps /
10857 gCamCapability[cameraId]->max_pixel_bandwidth;
10858 info->resource_cost = 100 * MIN(1.0, ratio);
10859 LOGI("camera %d resource cost is %d", cameraId,
10860 info->resource_cost);
10861
10862 pthread_mutex_unlock(&gCamLock);
10863 return rc;
10864}
10865
10866/*===========================================================================
10867 * FUNCTION : translateCapabilityToMetadata
10868 *
10869 * DESCRIPTION: translate the capability into camera_metadata_t
10870 *
10871 * PARAMETERS : type of the request
10872 *
10873 *
10874 * RETURN : success: camera_metadata_t*
10875 * failure: NULL
10876 *
10877 *==========================================================================*/
10878camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10879{
10880 if (mDefaultMetadata[type] != NULL) {
10881 return mDefaultMetadata[type];
10882 }
10883 //first time we are handling this request
10884 //fill up the metadata structure using the wrapper class
10885 CameraMetadata settings;
10886 //translate from cam_capability_t to camera_metadata_tag_t
10887 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10888 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10889 int32_t defaultRequestID = 0;
10890 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10891
10892 /* OIS disable */
10893 char ois_prop[PROPERTY_VALUE_MAX];
10894 memset(ois_prop, 0, sizeof(ois_prop));
10895 property_get("persist.camera.ois.disable", ois_prop, "0");
10896 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10897
10898 /* Force video to use OIS */
10899 char videoOisProp[PROPERTY_VALUE_MAX];
10900 memset(videoOisProp, 0, sizeof(videoOisProp));
10901 property_get("persist.camera.ois.video", videoOisProp, "1");
10902 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010903
10904 // Hybrid AE enable/disable
10905 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10906 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10907 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10908 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10909
Thierry Strudel3d639192016-09-09 11:52:26 -070010910 uint8_t controlIntent = 0;
10911 uint8_t focusMode;
10912 uint8_t vsMode;
10913 uint8_t optStabMode;
10914 uint8_t cacMode;
10915 uint8_t edge_mode;
10916 uint8_t noise_red_mode;
10917 uint8_t tonemap_mode;
10918 bool highQualityModeEntryAvailable = FALSE;
10919 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010920 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010921 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10922 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010923 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010924 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010925 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010926
Thierry Strudel3d639192016-09-09 11:52:26 -070010927 switch (type) {
10928 case CAMERA3_TEMPLATE_PREVIEW:
10929 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10930 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10931 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10932 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10933 edge_mode = ANDROID_EDGE_MODE_FAST;
10934 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10935 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10936 break;
10937 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10938 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10939 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10940 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10941 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10942 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10943 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10944 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10945 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10946 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10947 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10948 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10949 highQualityModeEntryAvailable = TRUE;
10950 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10951 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10952 fastModeEntryAvailable = TRUE;
10953 }
10954 }
10955 if (highQualityModeEntryAvailable) {
10956 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10957 } else if (fastModeEntryAvailable) {
10958 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10959 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010960 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10961 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10962 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010963 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010964 break;
10965 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10966 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10967 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10968 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010969 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10970 edge_mode = ANDROID_EDGE_MODE_FAST;
10971 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10972 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10973 if (forceVideoOis)
10974 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10975 break;
10976 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10977 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10978 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10979 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010980 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10981 edge_mode = ANDROID_EDGE_MODE_FAST;
10982 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10983 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10984 if (forceVideoOis)
10985 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10986 break;
10987 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10988 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10989 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10990 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10991 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10992 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10993 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10994 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10995 break;
10996 case CAMERA3_TEMPLATE_MANUAL:
10997 edge_mode = ANDROID_EDGE_MODE_FAST;
10998 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10999 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11000 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11001 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11002 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11003 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11004 break;
11005 default:
11006 edge_mode = ANDROID_EDGE_MODE_FAST;
11007 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11008 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11009 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11010 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11011 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11012 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11013 break;
11014 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011015    // Set CAC to OFF if the underlying device doesn't support it
11016 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11017 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11018 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011019 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11020 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11021 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11022 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11023 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11024 }
11025 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011026 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011027 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011028
11029 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11030 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11031 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11032 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11033 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11034 || ois_disable)
11035 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11036 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011037 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011038
11039 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11040 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11041
11042 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11043 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11044
11045 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11046 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11047
11048 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11049 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11050
11051 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11052 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11053
11054 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11055 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11056
11057 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11058 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11059
11060 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11061 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11062
11063 /*flash*/
11064 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11065 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11066
11067 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11068 settings.update(ANDROID_FLASH_FIRING_POWER,
11069 &flashFiringLevel, 1);
11070
11071 /* lens */
11072 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11073 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11074
11075 if (gCamCapability[mCameraId]->filter_densities_count) {
11076 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11077 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11078 gCamCapability[mCameraId]->filter_densities_count);
11079 }
11080
11081 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11082 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11083
Thierry Strudel3d639192016-09-09 11:52:26 -070011084 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11085 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11086
11087 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11088 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11089
11090 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11091 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11092
11093 /* face detection (default to OFF) */
11094 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11095 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11096
Thierry Strudel54dc9782017-02-15 12:12:10 -080011097 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11098 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011099
11100 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11101 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11102
11103 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11104 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11105
Thierry Strudel3d639192016-09-09 11:52:26 -070011106
11107 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11108 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11109
11110    /* Exposure time (default to the minimum exposure time) */
11111 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11112 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11113
11114 /* frame duration */
11115 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11116 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11117
11118 /* sensitivity */
11119 static const int32_t default_sensitivity = 100;
11120 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011121#ifndef USE_HAL_3_3
11122 static const int32_t default_isp_sensitivity =
11123 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11124 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11125#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011126
11127 /*edge mode*/
11128 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11129
11130 /*noise reduction mode*/
11131 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11132
11133 /*color correction mode*/
11134 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11135 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11136
11137 /*transform matrix mode*/
11138    /*tonemap mode*/
11139
11140 int32_t scaler_crop_region[4];
11141 scaler_crop_region[0] = 0;
11142 scaler_crop_region[1] = 0;
11143 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11144 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11145 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11146
11147 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11148 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11149
11150 /*focus distance*/
11151 float focus_distance = 0.0;
11152 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11153
11154 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011155 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011156 float max_range = 0.0;
11157 float max_fixed_fps = 0.0;
11158 int32_t fps_range[2] = {0, 0};
11159 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11160 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011161 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11162 TEMPLATE_MAX_PREVIEW_FPS) {
11163 continue;
11164 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011165 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11166 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11167 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11168 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11169 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11170 if (range > max_range) {
11171 fps_range[0] =
11172 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11173 fps_range[1] =
11174 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11175 max_range = range;
11176 }
11177 } else {
11178 if (range < 0.01 && max_fixed_fps <
11179 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11180 fps_range[0] =
11181 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11182 fps_range[1] =
11183 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11184 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11185 }
11186 }
11187 }
11188 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11189
11190 /*precapture trigger*/
11191 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11192 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11193
11194 /*af trigger*/
11195 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11196 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11197
11198 /* ae & af regions */
11199 int32_t active_region[] = {
11200 gCamCapability[mCameraId]->active_array_size.left,
11201 gCamCapability[mCameraId]->active_array_size.top,
11202 gCamCapability[mCameraId]->active_array_size.left +
11203 gCamCapability[mCameraId]->active_array_size.width,
11204 gCamCapability[mCameraId]->active_array_size.top +
11205 gCamCapability[mCameraId]->active_array_size.height,
11206 0};
11207 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11208 sizeof(active_region) / sizeof(active_region[0]));
11209 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11210 sizeof(active_region) / sizeof(active_region[0]));
11211
11212 /* black level lock */
11213 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11214 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11215
Thierry Strudel3d639192016-09-09 11:52:26 -070011216 //special defaults for manual template
11217 if (type == CAMERA3_TEMPLATE_MANUAL) {
11218 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11219 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11220
11221 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11222 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11223
11224 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11225 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11226
11227 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11228 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11229
11230 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11231 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11232
11233 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11234 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11235 }
11236
11237
11238    /* TNR
11239     * This is where we decide for which templates TNR is enabled.
11240     * TNR is turned on if either the preview or the video stream requires it.
11241     * This is not to be confused with per-stream linking; that decision
11242     * is still made on a per-session basis and is handled as part of stream configuration.
11243     */
11244 uint8_t tnr_enable = 0;
11245
11246 if (m_bTnrPreview || m_bTnrVideo) {
11247
11248 switch (type) {
11249 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11250 tnr_enable = 1;
11251 break;
11252
11253 default:
11254 tnr_enable = 0;
11255 break;
11256 }
11257
11258 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11259 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11260 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11261
11262 LOGD("TNR:%d with process plate %d for template:%d",
11263 tnr_enable, tnr_process_type, type);
11264 }
11265
11266 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011267 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011268 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11269
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011270 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011271 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11272
Shuzhen Wang920ea402017-05-03 08:49:39 -070011273 uint8_t related_camera_id = mCameraId;
11274 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011275
11276 /* CDS default */
11277 char prop[PROPERTY_VALUE_MAX];
11278 memset(prop, 0, sizeof(prop));
11279 property_get("persist.camera.CDS", prop, "Auto");
11280 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11281 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11282 if (CAM_CDS_MODE_MAX == cds_mode) {
11283 cds_mode = CAM_CDS_MODE_AUTO;
11284 }
11285
11286 /* Disabling CDS in templates which have TNR enabled*/
11287 if (tnr_enable)
11288 cds_mode = CAM_CDS_MODE_OFF;
11289
11290 int32_t mode = cds_mode;
11291 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011292
Thierry Strudel269c81a2016-10-12 12:13:59 -070011293 /* Manual Convergence AEC Speed is disabled by default*/
11294 float default_aec_speed = 0;
11295 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11296
11297 /* Manual Convergence AWB Speed is disabled by default*/
11298 float default_awb_speed = 0;
11299 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11300
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011301 // Set instant AEC to normal convergence by default
11302 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11303 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11304
Shuzhen Wang19463d72016-03-08 11:09:52 -080011305 /* hybrid ae */
11306 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11307
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011308 if (gExposeEnableZslKey) {
11309 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11310 }
11311
Thierry Strudel3d639192016-09-09 11:52:26 -070011312 mDefaultMetadata[type] = settings.release();
11313
11314 return mDefaultMetadata[type];
11315}
11316
11317/*===========================================================================
11318 * FUNCTION : setFrameParameters
11319 *
11320 * DESCRIPTION: set parameters per frame as requested in the metadata from
11321 * framework
11322 *
11323 * PARAMETERS :
11324 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011325 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011326 * @blob_request: Whether this request is a blob request or not
11327 *
11328 * RETURN : success: NO_ERROR
11329 * failure:
11330 *==========================================================================*/
11331int QCamera3HardwareInterface::setFrameParameters(
11332 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011333 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011334 int blob_request,
11335 uint32_t snapshotStreamId)
11336{
11337 /*translate from camera_metadata_t type to parm_type_t*/
11338 int rc = 0;
11339 int32_t hal_version = CAM_HAL_V3;
11340
11341 clear_metadata_buffer(mParameters);
11342 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11343 LOGE("Failed to set hal version in the parameters");
11344 return BAD_VALUE;
11345 }
11346
11347 /*we need to update the frame number in the parameters*/
11348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11349 request->frame_number)) {
11350 LOGE("Failed to set the frame number in the parameters");
11351 return BAD_VALUE;
11352 }
11353
11354 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011356 LOGE("Failed to set stream type mask in the parameters");
11357 return BAD_VALUE;
11358 }
11359
11360 if (mUpdateDebugLevel) {
11361 uint32_t dummyDebugLevel = 0;
11362        /* The value of dummyDebugLevel is irrelevant. On
11363         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
11364 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11365 dummyDebugLevel)) {
11366 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11367 return BAD_VALUE;
11368 }
11369 mUpdateDebugLevel = false;
11370 }
11371
11372 if(request->settings != NULL){
11373 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11374 if (blob_request)
11375 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11376 }
11377
11378 return rc;
11379}
11380
11381/*===========================================================================
11382 * FUNCTION : setReprocParameters
11383 *
11384 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11385 * return it.
11386 *
11387 * PARAMETERS :
11388 * @request : request that needs to be serviced
11389 *
11390 * RETURN : success: NO_ERROR
11391 * failure:
11392 *==========================================================================*/
11393int32_t QCamera3HardwareInterface::setReprocParameters(
11394 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11395 uint32_t snapshotStreamId)
11396{
11397 /*translate from camera_metadata_t type to parm_type_t*/
11398 int rc = 0;
11399
11400 if (NULL == request->settings){
11401 LOGE("Reprocess settings cannot be NULL");
11402 return BAD_VALUE;
11403 }
11404
11405 if (NULL == reprocParam) {
11406 LOGE("Invalid reprocessing metadata buffer");
11407 return BAD_VALUE;
11408 }
11409 clear_metadata_buffer(reprocParam);
11410
11411 /*we need to update the frame number in the parameters*/
11412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11413 request->frame_number)) {
11414 LOGE("Failed to set the frame number in the parameters");
11415 return BAD_VALUE;
11416 }
11417
11418 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11419 if (rc < 0) {
11420 LOGE("Failed to translate reproc request");
11421 return rc;
11422 }
11423
11424 CameraMetadata frame_settings;
11425 frame_settings = request->settings;
11426 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11427 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11428 int32_t *crop_count =
11429 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11430 int32_t *crop_data =
11431 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11432 int32_t *roi_map =
11433 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11434 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11435 cam_crop_data_t crop_meta;
11436 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11437 crop_meta.num_of_streams = 1;
11438 crop_meta.crop_info[0].crop.left = crop_data[0];
11439 crop_meta.crop_info[0].crop.top = crop_data[1];
11440 crop_meta.crop_info[0].crop.width = crop_data[2];
11441 crop_meta.crop_info[0].crop.height = crop_data[3];
11442
11443 crop_meta.crop_info[0].roi_map.left =
11444 roi_map[0];
11445 crop_meta.crop_info[0].roi_map.top =
11446 roi_map[1];
11447 crop_meta.crop_info[0].roi_map.width =
11448 roi_map[2];
11449 crop_meta.crop_info[0].roi_map.height =
11450 roi_map[3];
11451
11452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11453 rc = BAD_VALUE;
11454 }
11455 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11456 request->input_buffer->stream,
11457 crop_meta.crop_info[0].crop.left,
11458 crop_meta.crop_info[0].crop.top,
11459 crop_meta.crop_info[0].crop.width,
11460 crop_meta.crop_info[0].crop.height);
11461 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11462 request->input_buffer->stream,
11463 crop_meta.crop_info[0].roi_map.left,
11464 crop_meta.crop_info[0].roi_map.top,
11465 crop_meta.crop_info[0].roi_map.width,
11466 crop_meta.crop_info[0].roi_map.height);
11467 } else {
11468 LOGE("Invalid reprocess crop count %d!", *crop_count);
11469 }
11470 } else {
11471 LOGE("No crop data from matching output stream");
11472 }
11473
11474    /* These settings are not needed for regular requests, so handle them specially for
11475       reprocess requests; this information is needed for the EXIF tags */
11476 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11477 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11478 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11479 if (NAME_NOT_FOUND != val) {
11480 uint32_t flashMode = (uint32_t)val;
11481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11482 rc = BAD_VALUE;
11483 }
11484 } else {
11485 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11486 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11487 }
11488 } else {
11489 LOGH("No flash mode in reprocess settings");
11490 }
11491
11492 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11493 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11494 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11495 rc = BAD_VALUE;
11496 }
11497 } else {
11498 LOGH("No flash state in reprocess settings");
11499 }
11500
11501 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11502 uint8_t *reprocessFlags =
11503 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11505 *reprocessFlags)) {
11506 rc = BAD_VALUE;
11507 }
11508 }
11509
Thierry Strudel54dc9782017-02-15 12:12:10 -080011510 // Add exif debug data to internal metadata
11511 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11512 mm_jpeg_debug_exif_params_t *debug_params =
11513 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11514 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11515 // AE
11516 if (debug_params->ae_debug_params_valid == TRUE) {
11517 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11518 debug_params->ae_debug_params);
11519 }
11520 // AWB
11521 if (debug_params->awb_debug_params_valid == TRUE) {
11522 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11523 debug_params->awb_debug_params);
11524 }
11525 // AF
11526 if (debug_params->af_debug_params_valid == TRUE) {
11527 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11528 debug_params->af_debug_params);
11529 }
11530 // ASD
11531 if (debug_params->asd_debug_params_valid == TRUE) {
11532 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11533 debug_params->asd_debug_params);
11534 }
11535 // Stats
11536 if (debug_params->stats_debug_params_valid == TRUE) {
11537 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11538 debug_params->stats_debug_params);
11539 }
11540 // BE Stats
11541 if (debug_params->bestats_debug_params_valid == TRUE) {
11542 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11543 debug_params->bestats_debug_params);
11544 }
11545 // BHIST
11546 if (debug_params->bhist_debug_params_valid == TRUE) {
11547 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11548 debug_params->bhist_debug_params);
11549 }
11550 // 3A Tuning
11551 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11552 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11553 debug_params->q3a_tuning_debug_params);
11554 }
11555 }
11556
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011557 // Add metadata which reprocess needs
11558 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11559 cam_reprocess_info_t *repro_info =
11560 (cam_reprocess_info_t *)frame_settings.find
11561 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011562 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011563 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011564 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011565 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011566 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011567 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011568 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011569 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011571 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011573 repro_info->pipeline_flip);
11574 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11575 repro_info->af_roi);
11576 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11577 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011578        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11579            CAM_INTF_PARM_ROTATION metadata has already been added in
11580            translateToHalMetadata. HAL needs to keep this new rotation
11581            metadata. Otherwise, the old rotation info saved in the vendor tag
11582            would be used. */
11583 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11584 CAM_INTF_PARM_ROTATION, reprocParam) {
11585 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11586 } else {
11587 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011588 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011589 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011590 }
11591
11592    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11593       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11594       roi.width and roi.height would be the final JPEG size.
11595       For now, HAL only checks this for reprocess requests. */
11596 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11597 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11598 uint8_t *enable =
11599 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11600 if (*enable == TRUE) {
11601 int32_t *crop_data =
11602 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11603 cam_stream_crop_info_t crop_meta;
11604 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11605 crop_meta.stream_id = 0;
11606 crop_meta.crop.left = crop_data[0];
11607 crop_meta.crop.top = crop_data[1];
11608 crop_meta.crop.width = crop_data[2];
11609 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011610 // The JPEG crop roi should match cpp output size
11611 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11612 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11613 crop_meta.roi_map.left = 0;
11614 crop_meta.roi_map.top = 0;
11615 crop_meta.roi_map.width = cpp_crop->crop.width;
11616 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011617 }
11618 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11619 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011620 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011621 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011622 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11623 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011624 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011625 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11626
11627 // Add JPEG scale information
11628 cam_dimension_t scale_dim;
11629 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11630 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11631 int32_t *roi =
11632 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11633 scale_dim.width = roi[2];
11634 scale_dim.height = roi[3];
11635 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11636 scale_dim);
11637 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11638 scale_dim.width, scale_dim.height, mCameraId);
11639 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011640 }
11641 }
11642
11643 return rc;
11644}
11645
11646/*===========================================================================
11647 * FUNCTION : saveRequestSettings
11648 *
11649 * DESCRIPTION: Add any settings that might have changed to the request settings
11650 * and save the settings to be applied on the frame
11651 *
11652 * PARAMETERS :
11653 * @jpegMetadata : the extracted and/or modified jpeg metadata
11654 * @request : request with initial settings
11655 *
11656 * RETURN :
11657 * camera_metadata_t* : pointer to the saved request settings
11658 *==========================================================================*/
11659camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11660 const CameraMetadata &jpegMetadata,
11661 camera3_capture_request_t *request)
11662{
11663 camera_metadata_t *resultMetadata;
11664 CameraMetadata camMetadata;
11665 camMetadata = request->settings;
11666
11667 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11668 int32_t thumbnail_size[2];
11669 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11670 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11671 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11672 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11673 }
11674
11675 if (request->input_buffer != NULL) {
11676 uint8_t reprocessFlags = 1;
11677 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11678 (uint8_t*)&reprocessFlags,
11679 sizeof(reprocessFlags));
11680 }
11681
11682 resultMetadata = camMetadata.release();
11683 return resultMetadata;
11684}
11685
11686/*===========================================================================
11687 * FUNCTION : setHalFpsRange
11688 *
11689 * DESCRIPTION: set FPS range parameter
11690 *
11691 *
11692 * PARAMETERS :
11693 * @settings : Metadata from framework
11694 * @hal_metadata: Metadata buffer
11695 *
11696 *
11697 * RETURN : success: NO_ERROR
11698 * failure:
11699 *==========================================================================*/
11700int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11701 metadata_buffer_t *hal_metadata)
11702{
11703 int32_t rc = NO_ERROR;
11704 cam_fps_range_t fps_range;
11705 fps_range.min_fps = (float)
11706 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11707 fps_range.max_fps = (float)
11708 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11709 fps_range.video_min_fps = fps_range.min_fps;
11710 fps_range.video_max_fps = fps_range.max_fps;
11711
11712 LOGD("aeTargetFpsRange fps: [%f %f]",
11713 fps_range.min_fps, fps_range.max_fps);
11714 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11715 * follows:
11716 * ---------------------------------------------------------------|
11717 * Video stream is absent in configure_streams |
11718 * (Camcorder preview before the first video record |
11719 * ---------------------------------------------------------------|
11720 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11721 * | | | vid_min/max_fps|
11722 * ---------------------------------------------------------------|
11723 * NO | [ 30, 240] | 240 | [240, 240] |
11724 * |-------------|-------------|----------------|
11725 * | [240, 240] | 240 | [240, 240] |
11726 * ---------------------------------------------------------------|
11727 * Video stream is present in configure_streams |
11728 * ---------------------------------------------------------------|
11729 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11730 * | | | vid_min/max_fps|
11731 * ---------------------------------------------------------------|
11732 * NO | [ 30, 240] | 240 | [240, 240] |
11733 * (camcorder prev |-------------|-------------|----------------|
11734 * after video rec | [240, 240] | 240 | [240, 240] |
11735 * is stopped) | | | |
11736 * ---------------------------------------------------------------|
11737 * YES | [ 30, 240] | 240 | [240, 240] |
11738 * |-------------|-------------|----------------|
11739 * | [240, 240] | 240 | [240, 240] |
11740 * ---------------------------------------------------------------|
11741 * When Video stream is absent in configure_streams,
11742 * preview fps = sensor_fps / batchsize
11743 * Eg: for 240fps at batchSize 4, preview = 60fps
11744 * for 120fps at batchSize 4, preview = 30fps
11745 *
11746 * When video stream is present in configure_streams, preview fps is as per
11747 * the ratio of preview buffers to video buffers requested in process
11748 * capture request
11749 */
11750 mBatchSize = 0;
11751 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11752 fps_range.min_fps = fps_range.video_max_fps;
11753 fps_range.video_min_fps = fps_range.video_max_fps;
11754 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11755 fps_range.max_fps);
11756 if (NAME_NOT_FOUND != val) {
11757 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11759 return BAD_VALUE;
11760 }
11761
11762 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11763 /* If batchmode is currently in progress and the fps changes,
11764 * set the flag to restart the sensor */
11765 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11766 (mHFRVideoFps != fps_range.max_fps)) {
11767 mNeedSensorRestart = true;
11768 }
11769 mHFRVideoFps = fps_range.max_fps;
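                // Batch size is the ratio of the HFR sensor fps to the HFR preview fps.
                // Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30): a 240fps request
                // yields a batch of 8, subject to the MAX_HFR_BATCH_SIZE clamp below.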
11770 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11771 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11772 mBatchSize = MAX_HFR_BATCH_SIZE;
11773 }
11774 }
11775 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11776
11777 }
11778 } else {
11779 /* HFR mode is session param in backend/ISP. This should be reset when
11780 * in non-HFR mode */
11781 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11783 return BAD_VALUE;
11784 }
11785 }
11786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11787 return BAD_VALUE;
11788 }
11789 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11790 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11791 return rc;
11792}
11793
11794/*===========================================================================
11795 * FUNCTION : translateToHalMetadata
11796 *
11797 * DESCRIPTION: read settings from the framework camera_metadata_t and translate them to HAL metadata (metadata_buffer_t) entries
11798 *
11799 *
11800 * PARAMETERS :
11801 * @request : request sent from framework
11802 *
11803 *
11804 * RETURN : success: NO_ERROR
11805 * failure: BAD_VALUE
11806 *==========================================================================*/
11807int QCamera3HardwareInterface::translateToHalMetadata
11808 (const camera3_capture_request_t *request,
11809 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011810 uint32_t snapshotStreamId) {
11811 if (request == nullptr || hal_metadata == nullptr) {
11812 return BAD_VALUE;
11813 }
11814
11815 int64_t minFrameDuration = getMinFrameDuration(request);
11816
11817 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11818 minFrameDuration);
11819}
11820
11821int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11822 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11823 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11824
Thierry Strudel3d639192016-09-09 11:52:26 -070011825 int rc = 0;
11826 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011827 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011828
11829 /* Do not change the order of the following list unless you know what you are
11830 * doing.
11831 * The order is laid out in such a way that parameters in the front of the table
11832 * may be used to override the parameters later in the table. Examples are:
11833 * 1. META_MODE should precede AEC/AWB/AF MODE
11834 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11835 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11836 * 4. Any mode should precede its corresponding settings
11837 */
11838 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11839 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11841 rc = BAD_VALUE;
11842 }
11843 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11844 if (rc != NO_ERROR) {
11845 LOGE("extractSceneMode failed");
11846 }
11847 }
11848
11849 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11850 uint8_t fwk_aeMode =
11851 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11852 uint8_t aeMode;
11853 int32_t redeye;
11854
11855 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11856 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011857 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11858 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011859 } else {
11860 aeMode = CAM_AE_MODE_ON;
11861 }
11862 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11863 redeye = 1;
11864 } else {
11865 redeye = 0;
11866 }
11867
11868 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11869 fwk_aeMode);
11870 if (NAME_NOT_FOUND != val) {
11871 int32_t flashMode = (int32_t)val;
11872 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11873 }
11874
11875 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11876 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11877 rc = BAD_VALUE;
11878 }
11879 }
11880
11881 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11882 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11883 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11884 fwk_whiteLevel);
11885 if (NAME_NOT_FOUND != val) {
11886 uint8_t whiteLevel = (uint8_t)val;
11887 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11888 rc = BAD_VALUE;
11889 }
11890 }
11891 }
11892
11893 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11894 uint8_t fwk_cacMode =
11895 frame_settings.find(
11896 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11897 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11898 fwk_cacMode);
11899 if (NAME_NOT_FOUND != val) {
11900 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11901 bool entryAvailable = FALSE;
11902 // Check whether Frameworks set CAC mode is supported in device or not
11903 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11904 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11905 entryAvailable = TRUE;
11906 break;
11907 }
11908 }
11909 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11910             // If the requested mode is not supported on this device, fall back to a supported mode, i.e.,
11911 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11912 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11913 if (entryAvailable == FALSE) {
11914 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11915 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11916 } else {
11917 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11918                         // HIGH_QUALITY is not supported, so fall back to FAST since the spec
11919                         // says the underlying implementation can be the same for both modes.
11920 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11921 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11922                         // FAST is not supported either; fall back to OFF rather than HIGH
11923                         // to avoid the fps drop that high quality processing would incur.
11924 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11925 } else {
11926 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11927 }
11928 }
11929 }
11930 LOGD("Final cacMode is %d", cacMode);
11931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11932 rc = BAD_VALUE;
11933 }
11934 } else {
11935 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11936 }
11937 }
11938
Thierry Strudel2896d122017-02-23 19:18:03 -080011939 char af_value[PROPERTY_VALUE_MAX];
11940 property_get("persist.camera.af.infinity", af_value, "0");
11941
Jason Lee84ae9972017-02-24 13:24:24 -080011942 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011943 if (atoi(af_value) == 0) {
11944 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011945 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011946 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11947 fwk_focusMode);
11948 if (NAME_NOT_FOUND != val) {
11949 uint8_t focusMode = (uint8_t)val;
11950 LOGD("set focus mode %d", focusMode);
11951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11952 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11953 rc = BAD_VALUE;
11954 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011955 }
11956 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011957 } else {
11958 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11959 LOGE("Focus forced to infinity %d", focusMode);
11960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11961 rc = BAD_VALUE;
11962 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011963 }
11964
Jason Lee84ae9972017-02-24 13:24:24 -080011965 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11966 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011967 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11969 focalDistance)) {
11970 rc = BAD_VALUE;
11971 }
11972 }
11973
11974 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11975 uint8_t fwk_antibandingMode =
11976 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11977 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11978 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11979 if (NAME_NOT_FOUND != val) {
11980 uint32_t hal_antibandingMode = (uint32_t)val;
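            // When the framework requests AUTO antibanding, narrow it to the regional
            // auto mode based on whether the device is currently in a 60Hz mains zone.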
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011981 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11982 if (m60HzZone) {
11983 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11984 } else {
11985 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11986 }
11987 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11989 hal_antibandingMode)) {
11990 rc = BAD_VALUE;
11991 }
11992 }
11993 }
11994
11995 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11996 int32_t expCompensation = frame_settings.find(
11997 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11998 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11999 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12000 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12001 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012002 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012003 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12004 expCompensation)) {
12005 rc = BAD_VALUE;
12006 }
12007 }
12008
12009 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12010 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12012 rc = BAD_VALUE;
12013 }
12014 }
12015 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12016 rc = setHalFpsRange(frame_settings, hal_metadata);
12017 if (rc != NO_ERROR) {
12018 LOGE("setHalFpsRange failed");
12019 }
12020 }
12021
12022 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12023 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12025 rc = BAD_VALUE;
12026 }
12027 }
12028
12029 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12030 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12031 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12032 fwk_effectMode);
12033 if (NAME_NOT_FOUND != val) {
12034 uint8_t effectMode = (uint8_t)val;
12035 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12036 rc = BAD_VALUE;
12037 }
12038 }
12039 }
12040
12041 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12042 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12043 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12044 colorCorrectMode)) {
12045 rc = BAD_VALUE;
12046 }
12047 }
12048
12049 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12050 cam_color_correct_gains_t colorCorrectGains;
12051 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12052 colorCorrectGains.gains[i] =
12053 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12054 }
12055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12056 colorCorrectGains)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12062 cam_color_correct_matrix_t colorCorrectTransform;
12063 cam_rational_type_t transform_elem;
12064 size_t num = 0;
12065 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12066 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12067 transform_elem.numerator =
12068 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12069 transform_elem.denominator =
12070 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12071 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12072 num++;
12073 }
12074 }
12075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12076 colorCorrectTransform)) {
12077 rc = BAD_VALUE;
12078 }
12079 }
12080
12081 cam_trigger_t aecTrigger;
12082 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12083 aecTrigger.trigger_id = -1;
12084 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12085 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12086 aecTrigger.trigger =
12087 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12088 aecTrigger.trigger_id =
12089 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12091 aecTrigger)) {
12092 rc = BAD_VALUE;
12093 }
12094 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12095 aecTrigger.trigger, aecTrigger.trigger_id);
12096 }
12097
12098 /*af_trigger must come with a trigger id*/
12099 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12100 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12101 cam_trigger_t af_trigger;
12102 af_trigger.trigger =
12103 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12104 af_trigger.trigger_id =
12105 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12107 rc = BAD_VALUE;
12108 }
12109 LOGD("AfTrigger: %d AfTriggerID: %d",
12110 af_trigger.trigger, af_trigger.trigger_id);
12111 }
12112
12113 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12114 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12115 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12116 rc = BAD_VALUE;
12117 }
12118 }
12119 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12120 cam_edge_application_t edge_application;
12121 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012122
Thierry Strudel3d639192016-09-09 11:52:26 -070012123 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12124 edge_application.sharpness = 0;
12125 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012126 edge_application.sharpness =
12127 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12128 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12129 int32_t sharpness =
12130 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12131 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12132 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12133 LOGD("Setting edge mode sharpness %d", sharpness);
12134 edge_application.sharpness = sharpness;
12135 }
12136 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012137 }
12138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12139 rc = BAD_VALUE;
12140 }
12141 }
12142
12143 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12144 int32_t respectFlashMode = 1;
12145 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12146 uint8_t fwk_aeMode =
12147 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012148 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12149 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12150 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012151 respectFlashMode = 0;
12152 LOGH("AE Mode controls flash, ignore android.flash.mode");
12153 }
12154 }
12155 if (respectFlashMode) {
12156 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12157 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12158 LOGH("flash mode after mapping %d", val);
12159 // To check: CAM_INTF_META_FLASH_MODE usage
12160 if (NAME_NOT_FOUND != val) {
12161 uint8_t flashMode = (uint8_t)val;
12162 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166 }
12167 }
12168
12169 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12170 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12171 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12172 rc = BAD_VALUE;
12173 }
12174 }
12175
12176 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12177 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12178 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12179 flashFiringTime)) {
12180 rc = BAD_VALUE;
12181 }
12182 }
12183
12184 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12185 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12186 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12187 hotPixelMode)) {
12188 rc = BAD_VALUE;
12189 }
12190 }
12191
12192 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12193 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12195 lensAperture)) {
12196 rc = BAD_VALUE;
12197 }
12198 }
12199
12200 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12201 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12203 filterDensity)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12209 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12211 focalLength)) {
12212 rc = BAD_VALUE;
12213 }
12214 }
12215
12216 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12217 uint8_t optStabMode =
12218 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12220 optStabMode)) {
12221 rc = BAD_VALUE;
12222 }
12223 }
12224
12225 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12226 uint8_t videoStabMode =
12227 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12228 LOGD("videoStabMode from APP = %d", videoStabMode);
12229 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12230 videoStabMode)) {
12231 rc = BAD_VALUE;
12232 }
12233 }
12234
12235
12236 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12237 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12239 noiseRedMode)) {
12240 rc = BAD_VALUE;
12241 }
12242 }
12243
12244 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12245 float reprocessEffectiveExposureFactor =
12246 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12248 reprocessEffectiveExposureFactor)) {
12249 rc = BAD_VALUE;
12250 }
12251 }
12252
12253 cam_crop_region_t scalerCropRegion;
12254 bool scalerCropSet = false;
12255 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12256 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12257 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12258 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12259 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12260
12261 // Map coordinate system from active array to sensor output.
12262 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12263 scalerCropRegion.width, scalerCropRegion.height);
12264
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12266 scalerCropRegion)) {
12267 rc = BAD_VALUE;
12268 }
12269 scalerCropSet = true;
12270 }
12271
12272 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12273 int64_t sensorExpTime =
12274 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12275 LOGD("setting sensorExpTime %lld", sensorExpTime);
12276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12277 sensorExpTime)) {
12278 rc = BAD_VALUE;
12279 }
12280 }
12281
12282 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12283 int64_t sensorFrameDuration =
12284 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012285 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12286 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12287 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12288 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12290 sensorFrameDuration)) {
12291 rc = BAD_VALUE;
12292 }
12293 }
12294
12295 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12296 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12297 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12298 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12299 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12300 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12301 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12303 sensorSensitivity)) {
12304 rc = BAD_VALUE;
12305 }
12306 }
12307
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012308#ifndef USE_HAL_3_3
12309 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12310 int32_t ispSensitivity =
12311 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12312 if (ispSensitivity <
12313 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12314 ispSensitivity =
12315 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12316 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12317 }
12318 if (ispSensitivity >
12319 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12320 ispSensitivity =
12321 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12322 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12323 }
12324 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12325 ispSensitivity)) {
12326 rc = BAD_VALUE;
12327 }
12328 }
12329#endif
12330
Thierry Strudel3d639192016-09-09 11:52:26 -070012331 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12332 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12334 rc = BAD_VALUE;
12335 }
12336 }
12337
12338 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12339 uint8_t fwk_facedetectMode =
12340 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12341
12342 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12343 fwk_facedetectMode);
12344
12345 if (NAME_NOT_FOUND != val) {
12346 uint8_t facedetectMode = (uint8_t)val;
12347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12348 facedetectMode)) {
12349 rc = BAD_VALUE;
12350 }
12351 }
12352 }
12353
Thierry Strudel54dc9782017-02-15 12:12:10 -080012354 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012355 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012356 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12358 histogramMode)) {
12359 rc = BAD_VALUE;
12360 }
12361 }
12362
12363 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12364 uint8_t sharpnessMapMode =
12365 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12366 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12367 sharpnessMapMode)) {
12368 rc = BAD_VALUE;
12369 }
12370 }
12371
12372 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12373 uint8_t tonemapMode =
12374 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12376 rc = BAD_VALUE;
12377 }
12378 }
12379 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12380 /*All tonemap channels will have the same number of points*/
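    /* Each curve arrives as interleaved (Pin, Pout) float pairs, hence the entry
     * count is divided by 2 below; e.g. a linear curve could be {0.0, 0.0, 1.0, 1.0}. */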
12381 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12382 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12383 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12384 cam_rgb_tonemap_curves tonemapCurves;
12385 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12386 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12387 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12388 tonemapCurves.tonemap_points_cnt,
12389 CAM_MAX_TONEMAP_CURVE_SIZE);
12390 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12391 }
12392
12393 /* ch0 = G*/
12394 size_t point = 0;
12395 cam_tonemap_curve_t tonemapCurveGreen;
12396 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12397 for (size_t j = 0; j < 2; j++) {
12398 tonemapCurveGreen.tonemap_points[i][j] =
12399 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12400 point++;
12401 }
12402 }
12403 tonemapCurves.curves[0] = tonemapCurveGreen;
12404
12405 /* ch 1 = B */
12406 point = 0;
12407 cam_tonemap_curve_t tonemapCurveBlue;
12408 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12409 for (size_t j = 0; j < 2; j++) {
12410 tonemapCurveBlue.tonemap_points[i][j] =
12411 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12412 point++;
12413 }
12414 }
12415 tonemapCurves.curves[1] = tonemapCurveBlue;
12416
12417 /* ch 2 = R */
12418 point = 0;
12419 cam_tonemap_curve_t tonemapCurveRed;
12420 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12421 for (size_t j = 0; j < 2; j++) {
12422 tonemapCurveRed.tonemap_points[i][j] =
12423 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12424 point++;
12425 }
12426 }
12427 tonemapCurves.curves[2] = tonemapCurveRed;
12428
12429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12430 tonemapCurves)) {
12431 rc = BAD_VALUE;
12432 }
12433 }
12434
12435 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12436 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12438 captureIntent)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12444 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12446 blackLevelLock)) {
12447 rc = BAD_VALUE;
12448 }
12449 }
12450
12451 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12452 uint8_t lensShadingMapMode =
12453 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12455 lensShadingMapMode)) {
12456 rc = BAD_VALUE;
12457 }
12458 }
12459
12460 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12461 cam_area_t roi;
12462 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012463 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012464
12465 // Map coordinate system from active array to sensor output.
12466 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12467 roi.rect.height);
12468
12469 if (scalerCropSet) {
12470 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12471 }
12472 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476
12477 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12478 cam_area_t roi;
12479 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012480 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012481
12482 // Map coordinate system from active array to sensor output.
12483 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12484 roi.rect.height);
12485
12486 if (scalerCropSet) {
12487 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12488 }
12489 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 // CDS for non-HFR non-video mode
12495 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12496 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12497 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12498 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12499 LOGE("Invalid CDS mode %d!", *fwk_cds);
12500 } else {
12501 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12502 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12503 rc = BAD_VALUE;
12504 }
12505 }
12506 }
12507
Thierry Strudel04e026f2016-10-10 11:27:36 -070012508 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012509 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012510 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012511 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12512 }
12513 if (m_bVideoHdrEnabled)
12514 vhdr = CAM_VIDEO_HDR_MODE_ON;
12515
Thierry Strudel54dc9782017-02-15 12:12:10 -080012516 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12517
12518 if(vhdr != curr_hdr_state)
12519 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12520
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012521 rc = setVideoHdrMode(mParameters, vhdr);
12522 if (rc != NO_ERROR) {
12523 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012524 }
12525
12526 //IR
12527 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12528 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12529 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012530 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12531 uint8_t isIRon = 0;
12532
12533         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012534 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12535 LOGE("Invalid IR mode %d!", fwk_ir);
12536 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012537 if(isIRon != curr_ir_state )
12538 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12539
Thierry Strudel04e026f2016-10-10 11:27:36 -070012540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12541 CAM_INTF_META_IR_MODE, fwk_ir)) {
12542 rc = BAD_VALUE;
12543 }
12544 }
12545 }
12546
Thierry Strudel54dc9782017-02-15 12:12:10 -080012547 //Binning Correction Mode
12548 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12549 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12550 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12551 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12552 || (0 > fwk_binning_correction)) {
12553 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12554 } else {
12555 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12556 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12557 rc = BAD_VALUE;
12558 }
12559 }
12560 }
12561
Thierry Strudel269c81a2016-10-12 12:13:59 -070012562 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12563 float aec_speed;
12564 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12565 LOGD("AEC Speed :%f", aec_speed);
12566 if ( aec_speed < 0 ) {
12567             LOGE("Invalid AEC convergence speed %f!", aec_speed);
12568 } else {
12569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12570 aec_speed)) {
12571 rc = BAD_VALUE;
12572 }
12573 }
12574 }
12575
12576 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12577 float awb_speed;
12578 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12579 LOGD("AWB Speed :%f", awb_speed);
12580 if ( awb_speed < 0 ) {
12581             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12582 } else {
12583 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12584 awb_speed)) {
12585 rc = BAD_VALUE;
12586 }
12587 }
12588 }
12589
Thierry Strudel3d639192016-09-09 11:52:26 -070012590 // TNR
12591 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12592 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12593 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012594 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012595 cam_denoise_param_t tnr;
12596 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12597 tnr.process_plates =
12598 (cam_denoise_process_type_t)frame_settings.find(
12599 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12600 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012601
12602 if(b_TnrRequested != curr_tnr_state)
12603 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12604
Thierry Strudel3d639192016-09-09 11:52:26 -070012605 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12606 rc = BAD_VALUE;
12607 }
12608 }
12609
Thierry Strudel54dc9782017-02-15 12:12:10 -080012610 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012611 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012612 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12614 *exposure_metering_mode)) {
12615 rc = BAD_VALUE;
12616 }
12617 }
12618
Thierry Strudel3d639192016-09-09 11:52:26 -070012619 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12620 int32_t fwk_testPatternMode =
12621 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12622 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12623 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12624
12625 if (NAME_NOT_FOUND != testPatternMode) {
12626 cam_test_pattern_data_t testPatternData;
12627 memset(&testPatternData, 0, sizeof(testPatternData));
12628 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12629 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12630 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12631 int32_t *fwk_testPatternData =
12632 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12633 testPatternData.r = fwk_testPatternData[0];
12634 testPatternData.b = fwk_testPatternData[3];
12635 switch (gCamCapability[mCameraId]->color_arrangement) {
12636 case CAM_FILTER_ARRANGEMENT_RGGB:
12637 case CAM_FILTER_ARRANGEMENT_GRBG:
12638 testPatternData.gr = fwk_testPatternData[1];
12639 testPatternData.gb = fwk_testPatternData[2];
12640 break;
12641 case CAM_FILTER_ARRANGEMENT_GBRG:
12642 case CAM_FILTER_ARRANGEMENT_BGGR:
12643 testPatternData.gr = fwk_testPatternData[2];
12644 testPatternData.gb = fwk_testPatternData[1];
12645 break;
12646 default:
12647 LOGE("color arrangement %d is not supported",
12648 gCamCapability[mCameraId]->color_arrangement);
12649 break;
12650 }
12651 }
12652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12653 testPatternData)) {
12654 rc = BAD_VALUE;
12655 }
12656 } else {
12657 LOGE("Invalid framework sensor test pattern mode %d",
12658 fwk_testPatternMode);
12659 }
12660 }
12661
12662 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12663 size_t count = 0;
12664 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12665 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12666 gps_coords.data.d, gps_coords.count, count);
12667 if (gps_coords.count != count) {
12668 rc = BAD_VALUE;
12669 }
12670 }
12671
12672 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12673 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12674 size_t count = 0;
12675 const char *gps_methods_src = (const char *)
12676 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12677 memset(gps_methods, '\0', sizeof(gps_methods));
12678 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12679 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12680 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12681 if (GPS_PROCESSING_METHOD_SIZE != count) {
12682 rc = BAD_VALUE;
12683 }
12684 }
12685
12686 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12687 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12689 gps_timestamp)) {
12690 rc = BAD_VALUE;
12691 }
12692 }
12693
12694 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12695 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12696 cam_rotation_info_t rotation_info;
12697 if (orientation == 0) {
12698 rotation_info.rotation = ROTATE_0;
12699 } else if (orientation == 90) {
12700 rotation_info.rotation = ROTATE_90;
12701 } else if (orientation == 180) {
12702 rotation_info.rotation = ROTATE_180;
12703 } else if (orientation == 270) {
12704 rotation_info.rotation = ROTATE_270;
12705 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012706 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012707 rotation_info.streamId = snapshotStreamId;
12708 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12710 rc = BAD_VALUE;
12711 }
12712 }
12713
12714 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12715 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12716 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12717 rc = BAD_VALUE;
12718 }
12719 }
12720
12721 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12722 uint32_t thumb_quality = (uint32_t)
12723 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12725 thumb_quality)) {
12726 rc = BAD_VALUE;
12727 }
12728 }
12729
12730 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12731 cam_dimension_t dim;
12732 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12733 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12735 rc = BAD_VALUE;
12736 }
12737 }
12738
12739 // Internal metadata
12740 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12741 size_t count = 0;
12742 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12743 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12744 privatedata.data.i32, privatedata.count, count);
12745 if (privatedata.count != count) {
12746 rc = BAD_VALUE;
12747 }
12748 }
12749
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012750 // ISO/Exposure Priority
12751 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12752 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12753 cam_priority_mode_t mode =
12754 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12755 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12756 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12757 use_iso_exp_pty.previewOnly = FALSE;
12758 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12759 use_iso_exp_pty.value = *ptr;
12760
12761 if(CAM_ISO_PRIORITY == mode) {
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12763 use_iso_exp_pty)) {
12764 rc = BAD_VALUE;
12765 }
12766 }
12767 else {
12768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12769 use_iso_exp_pty)) {
12770 rc = BAD_VALUE;
12771 }
12772 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012773
12774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12775 rc = BAD_VALUE;
12776 }
12777 }
12778 } else {
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12780 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012781 }
12782 }
12783
12784 // Saturation
12785 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12786 int32_t* use_saturation =
12787 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12789 rc = BAD_VALUE;
12790 }
12791 }
12792
Thierry Strudel3d639192016-09-09 11:52:26 -070012793 // EV step
12794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12795 gCamCapability[mCameraId]->exp_compensation_step)) {
12796 rc = BAD_VALUE;
12797 }
12798
12799 // CDS info
12800 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12801 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12802 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12803
12804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12805 CAM_INTF_META_CDS_DATA, *cdsData)) {
12806 rc = BAD_VALUE;
12807 }
12808 }
12809
Shuzhen Wang19463d72016-03-08 11:09:52 -080012810 // Hybrid AE
12811 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12812 uint8_t *hybrid_ae = (uint8_t *)
12813 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12814
12815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12816 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12817 rc = BAD_VALUE;
12818 }
12819 }
12820
Shuzhen Wang14415f52016-11-16 18:26:18 -080012821 // Histogram
12822 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12823 uint8_t histogramMode =
12824 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12826 histogramMode)) {
12827 rc = BAD_VALUE;
12828 }
12829 }
12830
12831 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12832 int32_t histogramBins =
12833 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12835 histogramBins)) {
12836 rc = BAD_VALUE;
12837 }
12838 }
12839
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012840 // Tracking AF
12841 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12842 uint8_t trackingAfTrigger =
12843 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12844 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12845 trackingAfTrigger)) {
12846 rc = BAD_VALUE;
12847 }
12848 }
12849
Thierry Strudel3d639192016-09-09 11:52:26 -070012850 return rc;
12851}
12852
12853/*===========================================================================
12854 * FUNCTION : captureResultCb
12855 *
12856 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12857 *
12858 * PARAMETERS :
12859 * @frame : frame information from mm-camera-interface
12860 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12861 * @userdata: userdata
12862 *
12863 * RETURN : NONE
12864 *==========================================================================*/
12865void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12866 camera3_stream_buffer_t *buffer,
12867 uint32_t frame_number, bool isInputBuffer, void *userdata)
12868{
12869 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12870 if (hw == NULL) {
12871 LOGE("Invalid hw %p", hw);
12872 return;
12873 }
12874
12875 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12876 return;
12877}
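/* Note: these static callbacks recover the QCamera3HardwareInterface instance from the
 * opaque userdata pointer and forward to the corresponding member function; the same
 * pattern is used by setBufferErrorStatus below. */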
12878
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012879/*===========================================================================
12880 * FUNCTION : setBufferErrorStatus
12881 *
12882 * DESCRIPTION: Callback handler for channels to report any buffer errors
12883 *
12884 * PARAMETERS :
12885 * @ch : Channel on which buffer error is reported from
12886 * @frame_number : frame number on which buffer error is reported on
12887 * @buffer_status : buffer error status
12888 * @userdata: userdata
12889 *
12890 * RETURN : NONE
12891 *==========================================================================*/
12892void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12893 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12894{
12895 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12896 if (hw == NULL) {
12897 LOGE("Invalid hw %p", hw);
12898 return;
12899 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012900
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012901 hw->setBufferErrorStatus(ch, frame_number, err);
12902 return;
12903}
12904
12905void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12906 uint32_t frameNumber, camera3_buffer_status_t err)
12907{
12908 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12909 pthread_mutex_lock(&mMutex);
12910
12911 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12912 if (req.frame_number != frameNumber)
12913 continue;
12914 for (auto& k : req.mPendingBufferList) {
12915 if(k.stream->priv == ch) {
12916 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12917 }
12918 }
12919 }
12920
12921 pthread_mutex_unlock(&mMutex);
12922 return;
12923}
Thierry Strudel3d639192016-09-09 11:52:26 -070012924/*===========================================================================
12925 * FUNCTION : initialize
12926 *
12927 * DESCRIPTION: Pass framework callback pointers to HAL
12928 *
12929 * PARAMETERS :
12930 *
12931 *
12932 * RETURN : Success : 0
12933 * Failure: -ENODEV
12934 *==========================================================================*/
12935
12936int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12937 const camera3_callback_ops_t *callback_ops)
12938{
12939 LOGD("E");
12940 QCamera3HardwareInterface *hw =
12941 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12942 if (!hw) {
12943 LOGE("NULL camera device");
12944 return -ENODEV;
12945 }
12946
12947 int rc = hw->initialize(callback_ops);
12948 LOGD("X");
12949 return rc;
12950}
12951
12952/*===========================================================================
12953 * FUNCTION : configure_streams
12954 *
12955 * DESCRIPTION: Entry point to configure the set of output streams for the session
12956 *
12957 * PARAMETERS :
12958 *
12959 *
12960 * RETURN : Success: 0
12961 * Failure: -EINVAL (if stream configuration is invalid)
12962 * -ENODEV (fatal error)
12963 *==========================================================================*/
12964
12965int QCamera3HardwareInterface::configure_streams(
12966 const struct camera3_device *device,
12967 camera3_stream_configuration_t *stream_list)
12968{
12969 LOGD("E");
12970 QCamera3HardwareInterface *hw =
12971 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12972 if (!hw) {
12973 LOGE("NULL camera device");
12974 return -ENODEV;
12975 }
12976 int rc = hw->configureStreams(stream_list);
12977 LOGD("X");
12978 return rc;
12979}
12980
12981/*===========================================================================
12982 * FUNCTION : construct_default_request_settings
12983 *
12984 * DESCRIPTION: Configure a settings buffer to meet the required use case
12985 *
12986 * PARAMETERS :
12987 *
12988 *
12989 * RETURN : Success: Return valid metadata
12990 * Failure: Return NULL
12991 *==========================================================================*/
12992const camera_metadata_t* QCamera3HardwareInterface::
12993 construct_default_request_settings(const struct camera3_device *device,
12994 int type)
12995{
12996
12997 LOGD("E");
12998 camera_metadata_t* fwk_metadata = NULL;
12999 QCamera3HardwareInterface *hw =
13000 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13001 if (!hw) {
13002 LOGE("NULL camera device");
13003 return NULL;
13004 }
13005
13006 fwk_metadata = hw->translateCapabilityToMetadata(type);
13007
13008 LOGD("X");
13009 return fwk_metadata;
13010}
13011
13012/*===========================================================================
13013 * FUNCTION : process_capture_request
13014 *
13015 * DESCRIPTION: Entry point to submit a capture request to the HAL
13016 *
13017 * PARAMETERS :
13018 *
13019 *
13020 * RETURN :
13021 *==========================================================================*/
13022int QCamera3HardwareInterface::process_capture_request(
13023 const struct camera3_device *device,
13024 camera3_capture_request_t *request)
13025{
13026 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013027 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013028 QCamera3HardwareInterface *hw =
13029 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13030 if (!hw) {
13031 LOGE("NULL camera device");
13032 return -EINVAL;
13033 }
13034
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013035 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013036 LOGD("X");
13037 return rc;
13038}
13039
13040/*===========================================================================
13041 * FUNCTION : dump
13042 *
13043 * DESCRIPTION: Dump HAL state and debug info to the given file descriptor
13044 *
13045 * PARAMETERS :
13046 *
13047 *
13048 * RETURN :
13049 *==========================================================================*/
13050
13051void QCamera3HardwareInterface::dump(
13052 const struct camera3_device *device, int fd)
13053{
13054 /* Log level property is read when "adb shell dumpsys media.camera" is
13055 called so that the log level can be controlled without restarting
13056 the media server */
13057 getLogLevel();
13058
13059 LOGD("E");
13060 QCamera3HardwareInterface *hw =
13061 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13062 if (!hw) {
13063 LOGE("NULL camera device");
13064 return;
13065 }
13066
13067 hw->dump(fd);
13068 LOGD("X");
13069 return;
13070}
13071
13072/*===========================================================================
13073 * FUNCTION : flush
13074 *
13075 * DESCRIPTION: Flush all in-flight captures and return their buffers/results as quickly as possible
13076 *
13077 * PARAMETERS :
13078 *
13079 *
13080 * RETURN :
13081 *==========================================================================*/
13082
13083int QCamera3HardwareInterface::flush(
13084 const struct camera3_device *device)
13085{
13086 int rc;
13087 LOGD("E");
13088 QCamera3HardwareInterface *hw =
13089 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13090 if (!hw) {
13091 LOGE("NULL camera device");
13092 return -EINVAL;
13093 }
13094
13095 pthread_mutex_lock(&hw->mMutex);
13096 // Validate current state
13097 switch (hw->mState) {
13098 case STARTED:
13099 /* valid state */
13100 break;
13101
13102 case ERROR:
13103 pthread_mutex_unlock(&hw->mMutex);
13104 hw->handleCameraDeviceError();
13105 return -ENODEV;
13106
13107 default:
13108 LOGI("Flush returned during state %d", hw->mState);
13109 pthread_mutex_unlock(&hw->mMutex);
13110 return 0;
13111 }
13112 pthread_mutex_unlock(&hw->mMutex);
13113
13114 rc = hw->flush(true /* restart channels */ );
13115 LOGD("X");
13116 return rc;
13117}
13118
13119/*===========================================================================
13120 * FUNCTION : close_camera_device
13121 *
13122 * DESCRIPTION: Close the camera device and release the HAL instance
13123 *
13124 * PARAMETERS :
13125 *
13126 *
13127 * RETURN :
13128 *==========================================================================*/
13129int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13130{
13131 int ret = NO_ERROR;
13132 QCamera3HardwareInterface *hw =
13133 reinterpret_cast<QCamera3HardwareInterface *>(
13134 reinterpret_cast<camera3_device_t *>(device)->priv);
13135 if (!hw) {
13136 LOGE("NULL camera device");
13137 return BAD_VALUE;
13138 }
13139
13140 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13141 delete hw;
13142 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013143 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013144 return ret;
13145}
13146
13147/*===========================================================================
13148 * FUNCTION : getWaveletDenoiseProcessPlate
13149 *
13150 * DESCRIPTION: query wavelet denoise process plate
13151 *
13152 * PARAMETERS : None
13153 *
13154 * RETURN     : WNR process plate value
13155 *==========================================================================*/
13156cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13157{
13158 char prop[PROPERTY_VALUE_MAX];
13159 memset(prop, 0, sizeof(prop));
13160 property_get("persist.denoise.process.plates", prop, "0");
13161 int processPlate = atoi(prop);
13162 switch(processPlate) {
13163 case 0:
13164 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13165 case 1:
13166 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13167 case 2:
13168 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13169 case 3:
13170 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13171 default:
13172 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13173 }
13174}
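/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * The WNR plate is chosen through the persist.denoise.process.plates
 * property read above. A minimal sketch of exercising that mapping from
 * other HAL code; the use of property_set() here is an assumption made for
 * illustration only.
 *
 *     property_set("persist.denoise.process.plates", "1");
 *     cam_denoise_process_type_t plate = getWaveletDenoiseProcessPlate();
 *     // plate == CAM_WAVELET_DENOISE_CBCR_ONLY ("1" in the switch above)
 *==========================================================================*/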
13175
13176
13177/*===========================================================================
13178 * FUNCTION : getTemporalDenoiseProcessPlate
13179 *
13180 * DESCRIPTION: query temporal denoise process plate
13181 *
13182 * PARAMETERS : None
13183 *
13184 * RETURN     : TNR process plate value
13185 *==========================================================================*/
13186cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13187{
13188 char prop[PROPERTY_VALUE_MAX];
13189 memset(prop, 0, sizeof(prop));
13190 property_get("persist.tnr.process.plates", prop, "0");
13191 int processPlate = atoi(prop);
13192 switch(processPlate) {
13193 case 0:
13194 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13195 case 1:
13196 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13197 case 2:
13198 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13199 case 3:
13200 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13201 default:
13202 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13203 }
13204}
13205
13206
13207/*===========================================================================
13208 * FUNCTION : extractSceneMode
13209 *
13210 * DESCRIPTION: Extract scene mode from the framework's capture settings
13211 *
13212 * PARAMETERS :
13213 *   @frame_settings: CameraMetadata reference
13214 *   @metaMode: ANDROID_CONTROL_MODE value set by the framework
13215 *   @hal_metadata: hal metadata structure
13216 *
13217 * RETURN     : int32_t type of status: NO_ERROR on success, BAD_VALUE on failure
13218 *==========================================================================*/
13219int32_t QCamera3HardwareInterface::extractSceneMode(
13220 const CameraMetadata &frame_settings, uint8_t metaMode,
13221 metadata_buffer_t *hal_metadata)
13222{
13223 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013224 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13225
13226 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13227 LOGD("Ignoring control mode OFF_KEEP_STATE");
13228 return NO_ERROR;
13229 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013230
13231 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13232 camera_metadata_ro_entry entry =
13233 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13234 if (0 == entry.count)
13235 return rc;
13236
13237 uint8_t fwk_sceneMode = entry.data.u8[0];
13238
13239 int val = lookupHalName(SCENE_MODES_MAP,
13240 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13241 fwk_sceneMode);
13242 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013243 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013244 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013245 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013246 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013247
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013248 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13249 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13250 }
13251
13252 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13253         if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013254 cam_hdr_param_t hdr_params;
13255 hdr_params.hdr_enable = 1;
13256 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13257 hdr_params.hdr_need_1x = false;
13258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13259 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13260 rc = BAD_VALUE;
13261 }
13262 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013263
Thierry Strudel3d639192016-09-09 11:52:26 -070013264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13265 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13266 rc = BAD_VALUE;
13267 }
13268 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013269
13270 if (mForceHdrSnapshot) {
13271 cam_hdr_param_t hdr_params;
13272 hdr_params.hdr_enable = 1;
13273 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13274 hdr_params.hdr_need_1x = false;
13275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13276 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13277 rc = BAD_VALUE;
13278 }
13279 }
13280
Thierry Strudel3d639192016-09-09 11:52:26 -070013281 return rc;
13282}
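/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * How a framework HDR scene-mode request reaches this routine. The hal_meta
 * pointer is assumed to be a valid metadata_buffer_t owned by the HAL
 * (normally mParameters).
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *     int32_t rc = extractSceneMode(settings, mode, hal_meta);
 *     // rc == NO_ERROR on success; the translated scene mode (and, for HDR,
 *     // the bracketing parameters) is batched into hal_meta.
 *==========================================================================*/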
13283
13284/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013285 * FUNCTION : setVideoHdrMode
13286 *
13287 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13288 *
13289 * PARAMETERS :
13290 * @hal_metadata: hal metadata structure
13291 * @vhdr: requested video HDR mode (cam_video_hdr_mode_t)
13292 *
13293 * RETURN     : int32_t type of status: NO_ERROR on success, BAD_VALUE on failure
13294 *==========================================================================*/
13295int32_t QCamera3HardwareInterface::setVideoHdrMode(
13296 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13297{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013298 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13299 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13300 }
13301
13302 LOGE("Invalid Video HDR mode %d!", vhdr);
13303 return BAD_VALUE;
13304}
13305
13306/*===========================================================================
13307 * FUNCTION : setSensorHDR
13308 *
13309 * DESCRIPTION: Enable/disable sensor HDR.
13310 *
13311 * PARAMETERS :
13312 * @hal_metadata: hal metadata structure
13313 * @enable: boolean whether to enable/disable sensor HDR
13314 * @isVideoHdrEnable: true when invoked from the video HDR path (m_bSensorHDREnabled is left untouched)
13315 * RETURN     : int32_t type of status: NO_ERROR on success, BAD_VALUE on failure
13316 *==========================================================================*/
13317int32_t QCamera3HardwareInterface::setSensorHDR(
13318 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13319{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013320 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013321 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13322
13323 if (enable) {
13324 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13325 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13326 #ifdef _LE_CAMERA_
13327 //Default to staggered HDR for IOT
13328 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13329 #else
13330 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13331 #endif
13332 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13333 }
13334
13335 bool isSupported = false;
13336 switch (sensor_hdr) {
13337 case CAM_SENSOR_HDR_IN_SENSOR:
13338 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13339 CAM_QCOM_FEATURE_SENSOR_HDR) {
13340 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013341 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013342 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013343 break;
13344 case CAM_SENSOR_HDR_ZIGZAG:
13345 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13346 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13347 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013348 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013349 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013350 break;
13351 case CAM_SENSOR_HDR_STAGGERED:
13352 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13353 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13354 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013355 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013356 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013357 break;
13358 case CAM_SENSOR_HDR_OFF:
13359 isSupported = true;
13360 LOGD("Turning off sensor HDR");
13361 break;
13362 default:
13363 LOGE("HDR mode %d not supported", sensor_hdr);
13364 rc = BAD_VALUE;
13365 break;
13366 }
13367
13368 if(isSupported) {
13369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13370 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13371 rc = BAD_VALUE;
13372 } else {
13373 if(!isVideoHdrEnable)
13374 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013375 }
13376 }
13377 return rc;
13378}
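/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * Enabling staggered sensor HDR ("3" per the property read above) from other
 * HAL code; property_set() is used here only for illustration.
 *
 *     property_set("persist.camera.sensor.hdr", "3");   // staggered HDR
 *     int32_t rc = setSensorHDR(mParameters, true, false);
 *     // With a sensor that advertises CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR,
 *     // rc == NO_ERROR and m_bSensorHDREnabled becomes true.
 *==========================================================================*/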
13379
13380/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013381 * FUNCTION : needRotationReprocess
13382 *
13383 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13384 *
13385 * PARAMETERS : none
13386 *
13387 * RETURN : true: needed
13388 * false: no need
13389 *==========================================================================*/
13390bool QCamera3HardwareInterface::needRotationReprocess()
13391{
13392 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13393         // pp has the capability to process rotation; any requested rotation is applied in reprocess
13394 LOGH("need do reprocess for rotation");
13395 return true;
13396 }
13397
13398 return false;
13399}
13400
13401/*===========================================================================
13402 * FUNCTION : needReprocess
13403 *
13404 * DESCRIPTION: if reprocess is needed
13405 *
13406 * PARAMETERS : @postprocess_mask: post-processing features already applied to the frame
13407 *
13408 * RETURN : true: needed
13409 * false: no need
13410 *==========================================================================*/
13411bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13412{
13413 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13414 // TODO: add for ZSL HDR later
13415 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13416 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13417 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13418 return true;
13419 } else {
13420 LOGH("already post processed frame");
13421 return false;
13422 }
13423 }
13424 return needRotationReprocess();
13425}
13426
13427/*===========================================================================
13428 * FUNCTION : needJpegExifRotation
13429 *
13430 * DESCRIPTION: if rotation from jpeg is needed
13431 *
13432 * PARAMETERS : none
13433 *
13434 * RETURN : true: needed
13435 * false: no need
13436 *==========================================================================*/
13437bool QCamera3HardwareInterface::needJpegExifRotation()
13438{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013439 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013440 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13441 LOGD("Need use Jpeg EXIF Rotation");
13442 return true;
13443 }
13444 return false;
13445}
13446
13447/*===========================================================================
13448 * FUNCTION : addOfflineReprocChannel
13449 *
13450 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13451 * coming from input channel
13452 *
13453 * PARAMETERS :
13454 * @config : reprocess configuration
13455 * @inputChHandle : pointer to the input (source) channel
13456 *
13457 *
13458 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13459 *==========================================================================*/
13460QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13461 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13462{
13463 int32_t rc = NO_ERROR;
13464 QCamera3ReprocessChannel *pChannel = NULL;
13465
13466 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013467 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13468 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013469 if (NULL == pChannel) {
13470 LOGE("no mem for reprocess channel");
13471 return NULL;
13472 }
13473
13474 rc = pChannel->initialize(IS_TYPE_NONE);
13475 if (rc != NO_ERROR) {
13476 LOGE("init reprocess channel failed, ret = %d", rc);
13477 delete pChannel;
13478 return NULL;
13479 }
13480
13481 // pp feature config
13482 cam_pp_feature_config_t pp_config;
13483 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13484
13485 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13486 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13487 & CAM_QCOM_FEATURE_DSDN) {
13488         // Use CPP CDS in case h/w supports it.
13489 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13490 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13491 }
13492 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13493 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13494 }
13495
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013496 if (config.hdr_param.hdr_enable) {
13497 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13498 pp_config.hdr_param = config.hdr_param;
13499 }
13500
13501 if (mForceHdrSnapshot) {
13502 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13503 pp_config.hdr_param.hdr_enable = 1;
13504 pp_config.hdr_param.hdr_need_1x = 0;
13505 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13506 }
13507
Thierry Strudel3d639192016-09-09 11:52:26 -070013508 rc = pChannel->addReprocStreamsFromSource(pp_config,
13509 config,
13510 IS_TYPE_NONE,
13511 mMetadataChannel);
13512
13513 if (rc != NO_ERROR) {
13514 delete pChannel;
13515 return NULL;
13516 }
13517 return pChannel;
13518}
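/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * Creating an offline reprocess channel for an HDR snapshot. Only the
 * reprocess_config_t fields referenced in this file (hdr_param) are shown;
 * padding and the source stream description are assumed to be filled in by
 * the caller, as done elsewhere in this HAL.
 *
 *     reprocess_config_t cfg;
 *     memset(&cfg, 0, sizeof(cfg));
 *     cfg.hdr_param.hdr_enable  = 1;
 *     cfg.hdr_param.hdr_mode    = CAM_HDR_MODE_MULTIFRAME;
 *     cfg.hdr_param.hdr_need_1x = false;
 *     QCamera3ReprocessChannel *reproc =
 *             addOfflineReprocChannel(cfg, inputChannel);
 *     if (reproc == NULL) {
 *         LOGE("Failed to create offline reprocess channel");
 *     }
 *==========================================================================*/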
13519
13520/*===========================================================================
13521 * FUNCTION : getMobicatMask
13522 *
13523 * DESCRIPTION: returns mobicat mask
13524 *
13525 * PARAMETERS : none
13526 *
13527 * RETURN : mobicat mask
13528 *
13529 *==========================================================================*/
13530uint8_t QCamera3HardwareInterface::getMobicatMask()
13531{
13532 return m_MobicatMask;
13533}
13534
13535/*===========================================================================
13536 * FUNCTION : setMobicat
13537 *
13538 * DESCRIPTION: set Mobicat on/off.
13539 *
13540 * PARAMETERS :
13541 * @params : none
13542 *
13543 * RETURN : int32_t type of status
13544 * NO_ERROR -- success
13545 *              non-zero failure code
13546 *==========================================================================*/
13547int32_t QCamera3HardwareInterface::setMobicat()
13548{
13549 char value [PROPERTY_VALUE_MAX];
13550 property_get("persist.camera.mobicat", value, "0");
13551 int32_t ret = NO_ERROR;
13552 uint8_t enableMobi = (uint8_t)atoi(value);
13553
13554 if (enableMobi) {
13555 tune_cmd_t tune_cmd;
13556 tune_cmd.type = SET_RELOAD_CHROMATIX;
13557 tune_cmd.module = MODULE_ALL;
13558 tune_cmd.value = TRUE;
13559 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13560 CAM_INTF_PARM_SET_VFE_COMMAND,
13561 tune_cmd);
13562
13563 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13564 CAM_INTF_PARM_SET_PP_COMMAND,
13565 tune_cmd);
13566 }
13567 m_MobicatMask = enableMobi;
13568
13569 return ret;
13570}
13571
13572/*===========================================================================
13573* FUNCTION : getLogLevel
13574*
13575* DESCRIPTION: Reads the log level property into a variable
13576*
13577* PARAMETERS :
13578* None
13579*
13580* RETURN :
13581* None
13582*==========================================================================*/
13583void QCamera3HardwareInterface::getLogLevel()
13584{
13585 char prop[PROPERTY_VALUE_MAX];
13586 uint32_t globalLogLevel = 0;
13587
13588 property_get("persist.camera.hal.debug", prop, "0");
13589 int val = atoi(prop);
13590 if (0 <= val) {
13591 gCamHal3LogLevel = (uint32_t)val;
13592 }
13593
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013594 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013595 gKpiDebugLevel = atoi(prop);
13596
13597 property_get("persist.camera.global.debug", prop, "0");
13598 val = atoi(prop);
13599 if (0 <= val) {
13600 globalLogLevel = (uint32_t)val;
13601 }
13602
13603     /* The higher of persist.camera.hal.debug and persist.camera.global.debug is selected */
13604 if (gCamHal3LogLevel < globalLogLevel)
13605 gCamHal3LogLevel = globalLogLevel;
13606
13607 return;
13608}
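/*===========================================================================
 * EXAMPLE (illustrative only, not part of the original source):
 * The effective level is the higher of the two properties read above; e.g.
 * with persist.camera.hal.debug=2 and persist.camera.global.debug=4 the
 * resulting gCamHal3LogLevel is 4. Setting the properties from code via
 * property_set() is an assumption made for illustration:
 *
 *     property_set("persist.camera.hal.debug", "2");
 *     property_set("persist.camera.global.debug", "4");
 *     getLogLevel();                  // gCamHal3LogLevel == 4 afterwards
 *==========================================================================*/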
13609
13610/*===========================================================================
13611 * FUNCTION : validateStreamRotations
13612 *
13613 * DESCRIPTION: Check if the rotations requested are supported
13614 *
13615 * PARAMETERS :
13616 * @stream_list : streams to be configured
13617 *
13618 * RETURN : NO_ERROR on success
13619 * -EINVAL on failure
13620 *
13621 *==========================================================================*/
13622int QCamera3HardwareInterface::validateStreamRotations(
13623 camera3_stream_configuration_t *streamList)
13624{
13625 int rc = NO_ERROR;
13626
13627 /*
13628 * Loop through all streams requested in configuration
13629 * Check if unsupported rotations have been requested on any of them
13630 */
13631 for (size_t j = 0; j < streamList->num_streams; j++){
13632 camera3_stream_t *newStream = streamList->streams[j];
13633
13634 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13635 bool isImplDef = (newStream->format ==
13636 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13637 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13638 isImplDef);
13639
13640 if (isRotated && (!isImplDef || isZsl)) {
13641 LOGE("Error: Unsupported rotation of %d requested for stream"
13642 "type:%d and stream format:%d",
13643 newStream->rotation, newStream->stream_type,
13644 newStream->format);
13645 rc = -EINVAL;
13646 break;
13647 }
13648 }
13649
13650 return rc;
13651}
13652
13653/*===========================================================================
13654* FUNCTION : getFlashInfo
13655*
13656* DESCRIPTION: Retrieve information about whether the device has a flash.
13657*
13658* PARAMETERS :
13659* @cameraId : Camera id to query
13660* @hasFlash : Boolean indicating whether there is a flash device
13661* associated with given camera
13662* @flashNode : If a flash device exists, this will be its device node.
13663*
13664* RETURN :
13665* None
13666*==========================================================================*/
13667void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13668 bool& hasFlash,
13669 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13670{
13671 cam_capability_t* camCapability = gCamCapability[cameraId];
13672 if (NULL == camCapability) {
13673 hasFlash = false;
13674 flashNode[0] = '\0';
13675 } else {
13676 hasFlash = camCapability->flash_available;
13677 strlcpy(flashNode,
13678 (char*)camCapability->flash_dev_name,
13679 QCAMERA_MAX_FILEPATH_LENGTH);
13680 }
13681}
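/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * Querying flash availability for a camera id; the routine only reads
 * gCamCapability, so it is assumed to be callable before a device instance
 * exists (e.g. from the flash/torch module).
 *
 *     bool hasFlash = false;
 *     char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
 *     QCamera3HardwareInterface::getFlashInfo(cameraId, hasFlash, flashNode);
 *     if (hasFlash) {
 *         LOGD("Camera %d flash node: %s", cameraId, flashNode);
 *     }
 *==========================================================================*/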
13682
13683/*===========================================================================
13684* FUNCTION : getEepromVersionInfo
13685*
13686* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13687*
13688* PARAMETERS : None
13689*
13690* RETURN : string describing EEPROM version
13691* "\0" if no such info available
13692*==========================================================================*/
13693const char *QCamera3HardwareInterface::getEepromVersionInfo()
13694{
13695 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13696}
13697
13698/*===========================================================================
13699* FUNCTION : getLdafCalib
13700*
13701* DESCRIPTION: Retrieve Laser AF calibration data
13702*
13703* PARAMETERS : None
13704*
13705* RETURN : Two uint32_t describing laser AF calibration data
13706* NULL if none is available.
13707*==========================================================================*/
13708const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13709{
13710 if (mLdafCalibExist) {
13711 return &mLdafCalib[0];
13712 } else {
13713 return NULL;
13714 }
13715}
13716
13717/*===========================================================================
13718 * FUNCTION : dynamicUpdateMetaStreamInfo
13719 *
13720 * DESCRIPTION: This function:
13721 * (1) stops all the channels
13722 * (2) returns error on pending requests and buffers
13723 * (3) sends metastream_info in setparams
13724 * (4) starts all channels
13725 *              This is useful when the sensor has to be restarted to apply any
13726 * settings such as frame rate from a different sensor mode
13727 *
13728 * PARAMETERS : None
13729 *
13730 * RETURN : NO_ERROR on success
13731 * Error codes on failure
13732 *
13733 *==========================================================================*/
13734int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13735{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013736 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013737 int rc = NO_ERROR;
13738
13739 LOGD("E");
13740
13741 rc = stopAllChannels();
13742 if (rc < 0) {
13743 LOGE("stopAllChannels failed");
13744 return rc;
13745 }
13746
13747 rc = notifyErrorForPendingRequests();
13748 if (rc < 0) {
13749 LOGE("notifyErrorForPendingRequests failed");
13750 return rc;
13751 }
13752
13753 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13754 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13755 "Format:%d",
13756 mStreamConfigInfo.type[i],
13757 mStreamConfigInfo.stream_sizes[i].width,
13758 mStreamConfigInfo.stream_sizes[i].height,
13759 mStreamConfigInfo.postprocess_mask[i],
13760 mStreamConfigInfo.format[i]);
13761 }
13762
13763 /* Send meta stream info once again so that ISP can start */
13764 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13765 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13766 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13767 mParameters);
13768 if (rc < 0) {
13769 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13770 }
13771
13772 rc = startAllChannels();
13773 if (rc < 0) {
13774 LOGE("startAllChannels failed");
13775 return rc;
13776 }
13777
13778 LOGD("X");
13779 return rc;
13780}
13781
13782/*===========================================================================
13783 * FUNCTION : stopAllChannels
13784 *
13785 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13786 *
13787 * PARAMETERS : None
13788 *
13789 * RETURN : NO_ERROR on success
13790 * Error codes on failure
13791 *
13792 *==========================================================================*/
13793int32_t QCamera3HardwareInterface::stopAllChannels()
13794{
13795 int32_t rc = NO_ERROR;
13796
13797 LOGD("Stopping all channels");
13798 // Stop the Streams/Channels
13799 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13800 it != mStreamInfo.end(); it++) {
13801 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13802 if (channel) {
13803 channel->stop();
13804 }
13805 (*it)->status = INVALID;
13806 }
13807
13808 if (mSupportChannel) {
13809 mSupportChannel->stop();
13810 }
13811 if (mAnalysisChannel) {
13812 mAnalysisChannel->stop();
13813 }
13814 if (mRawDumpChannel) {
13815 mRawDumpChannel->stop();
13816 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013817 if (mHdrPlusRawSrcChannel) {
13818 mHdrPlusRawSrcChannel->stop();
13819 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013820 if (mMetadataChannel) {
13821 /* If content of mStreamInfo is not 0, there is metadata stream */
13822 mMetadataChannel->stop();
13823 }
13824
13825 LOGD("All channels stopped");
13826 return rc;
13827}
13828
13829/*===========================================================================
13830 * FUNCTION : startAllChannels
13831 *
13832 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13833 *
13834 * PARAMETERS : None
13835 *
13836 * RETURN : NO_ERROR on success
13837 * Error codes on failure
13838 *
13839 *==========================================================================*/
13840int32_t QCamera3HardwareInterface::startAllChannels()
13841{
13842 int32_t rc = NO_ERROR;
13843
13844 LOGD("Start all channels ");
13845 // Start the Streams/Channels
13846 if (mMetadataChannel) {
13847 /* If content of mStreamInfo is not 0, there is metadata stream */
13848 rc = mMetadataChannel->start();
13849 if (rc < 0) {
13850 LOGE("META channel start failed");
13851 return rc;
13852 }
13853 }
13854 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13855 it != mStreamInfo.end(); it++) {
13856 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13857 if (channel) {
13858 rc = channel->start();
13859 if (rc < 0) {
13860 LOGE("channel start failed");
13861 return rc;
13862 }
13863 }
13864 }
13865 if (mAnalysisChannel) {
13866 mAnalysisChannel->start();
13867 }
13868 if (mSupportChannel) {
13869 rc = mSupportChannel->start();
13870 if (rc < 0) {
13871 LOGE("Support channel start failed");
13872 return rc;
13873 }
13874 }
13875 if (mRawDumpChannel) {
13876 rc = mRawDumpChannel->start();
13877 if (rc < 0) {
13878 LOGE("RAW dump channel start failed");
13879 return rc;
13880 }
13881 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013882 if (mHdrPlusRawSrcChannel) {
13883 rc = mHdrPlusRawSrcChannel->start();
13884 if (rc < 0) {
13885 LOGE("HDR+ RAW channel start failed");
13886 return rc;
13887 }
13888 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013889
13890 LOGD("All channels started");
13891 return rc;
13892}
13893
13894/*===========================================================================
13895 * FUNCTION : notifyErrorForPendingRequests
13896 *
13897 * DESCRIPTION: This function sends error for all the pending requests/buffers
13898 *
13899 * PARAMETERS : None
13900 *
13901 * RETURN : Error codes
13902 * NO_ERROR on success
13903 *
13904 *==========================================================================*/
13905int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13906{
13907 int32_t rc = NO_ERROR;
13908 unsigned int frameNum = 0;
13909 camera3_capture_result_t result;
13910 camera3_stream_buffer_t *pStream_Buf = NULL;
13911
13912 memset(&result, 0, sizeof(camera3_capture_result_t));
13913
13914 if (mPendingRequestsList.size() > 0) {
13915 pendingRequestIterator i = mPendingRequestsList.begin();
13916 frameNum = i->frame_number;
13917 } else {
13918 /* There might still be pending buffers even though there are
13919 no pending requests. Setting the frameNum to MAX so that
13920 all the buffers with smaller frame numbers are returned */
13921 frameNum = UINT_MAX;
13922 }
13923
13924 LOGH("Oldest frame num on mPendingRequestsList = %u",
13925 frameNum);
13926
Emilian Peev7650c122017-01-19 08:24:33 -080013927 notifyErrorFoPendingDepthData(mDepthChannel);
13928
Thierry Strudel3d639192016-09-09 11:52:26 -070013929 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13930 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13931
13932 if (req->frame_number < frameNum) {
13933 // Send Error notify to frameworks for each buffer for which
13934 // metadata buffer is already sent
13935 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13936 req->frame_number, req->mPendingBufferList.size());
13937
13938 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13939 if (NULL == pStream_Buf) {
13940 LOGE("No memory for pending buffers array");
13941 return NO_MEMORY;
13942 }
13943 memset(pStream_Buf, 0,
13944 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13945 result.result = NULL;
13946 result.frame_number = req->frame_number;
13947 result.num_output_buffers = req->mPendingBufferList.size();
13948 result.output_buffers = pStream_Buf;
13949
13950 size_t index = 0;
13951 for (auto info = req->mPendingBufferList.begin();
13952 info != req->mPendingBufferList.end(); ) {
13953
13954 camera3_notify_msg_t notify_msg;
13955 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13956 notify_msg.type = CAMERA3_MSG_ERROR;
13957 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13958 notify_msg.message.error.error_stream = info->stream;
13959 notify_msg.message.error.frame_number = req->frame_number;
13960 pStream_Buf[index].acquire_fence = -1;
13961 pStream_Buf[index].release_fence = -1;
13962 pStream_Buf[index].buffer = info->buffer;
13963 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13964 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013965 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013966 index++;
13967 // Remove buffer from list
13968 info = req->mPendingBufferList.erase(info);
13969 }
13970
13971 // Remove this request from Map
13972 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13973 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13974 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13975
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013976 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013977
13978 delete [] pStream_Buf;
13979 } else {
13980
13981 // Go through the pending requests info and send error request to framework
13982 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13983
13984 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13985
13986 // Send error notify to frameworks
13987 camera3_notify_msg_t notify_msg;
13988 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13989 notify_msg.type = CAMERA3_MSG_ERROR;
13990 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13991 notify_msg.message.error.error_stream = NULL;
13992 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013993 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013994
13995 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13996 if (NULL == pStream_Buf) {
13997 LOGE("No memory for pending buffers array");
13998 return NO_MEMORY;
13999 }
14000 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14001
14002 result.result = NULL;
14003 result.frame_number = req->frame_number;
14004 result.input_buffer = i->input_buffer;
14005 result.num_output_buffers = req->mPendingBufferList.size();
14006 result.output_buffers = pStream_Buf;
14007
14008 size_t index = 0;
14009 for (auto info = req->mPendingBufferList.begin();
14010 info != req->mPendingBufferList.end(); ) {
14011 pStream_Buf[index].acquire_fence = -1;
14012 pStream_Buf[index].release_fence = -1;
14013 pStream_Buf[index].buffer = info->buffer;
14014 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14015 pStream_Buf[index].stream = info->stream;
14016 index++;
14017 // Remove buffer from list
14018 info = req->mPendingBufferList.erase(info);
14019 }
14020
14021 // Remove this request from Map
14022 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14023 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14024 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14025
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014026 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014027 delete [] pStream_Buf;
14028 i = erasePendingRequest(i);
14029 }
14030 }
14031
14032 /* Reset pending frame Drop list and requests list */
14033 mPendingFrameDropList.clear();
14034
14035 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14036 req.mPendingBufferList.clear();
14037 }
14038 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014039 LOGH("Cleared all the pending buffers ");
14040
14041 return rc;
14042}
14043
14044bool QCamera3HardwareInterface::isOnEncoder(
14045 const cam_dimension_t max_viewfinder_size,
14046 uint32_t width, uint32_t height)
14047{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014048 return ((width > (uint32_t)max_viewfinder_size.width) ||
14049 (height > (uint32_t)max_viewfinder_size.height) ||
14050 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14051 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014052}
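/*===========================================================================
 * EXAMPLE (illustrative only, not part of the original source):
 * With a 1920x1080 max viewfinder size, streams larger than the viewfinder
 * (or larger than 4K in either dimension) are routed to the encoder path:
 *
 *     cam_dimension_t maxVf = {1920, 1080};
 *     isOnEncoder(maxVf, 3840, 2160);   // true  - exceeds the viewfinder
 *     isOnEncoder(maxVf, 1280,  720);   // false - fits the viewfinder
 *==========================================================================*/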
14053
14054/*===========================================================================
14055 * FUNCTION : setBundleInfo
14056 *
14057 * DESCRIPTION: Set bundle info for all streams that are bundle.
14058 *
14059 * PARAMETERS : None
14060 *
14061 * RETURN : NO_ERROR on success
14062 * Error codes on failure
14063 *==========================================================================*/
14064int32_t QCamera3HardwareInterface::setBundleInfo()
14065{
14066 int32_t rc = NO_ERROR;
14067
14068 if (mChannelHandle) {
14069 cam_bundle_config_t bundleInfo;
14070 memset(&bundleInfo, 0, sizeof(bundleInfo));
14071 rc = mCameraHandle->ops->get_bundle_info(
14072 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14073 if (rc != NO_ERROR) {
14074 LOGE("get_bundle_info failed");
14075 return rc;
14076 }
14077 if (mAnalysisChannel) {
14078 mAnalysisChannel->setBundleInfo(bundleInfo);
14079 }
14080 if (mSupportChannel) {
14081 mSupportChannel->setBundleInfo(bundleInfo);
14082 }
14083 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14084 it != mStreamInfo.end(); it++) {
14085 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14086 channel->setBundleInfo(bundleInfo);
14087 }
14088 if (mRawDumpChannel) {
14089 mRawDumpChannel->setBundleInfo(bundleInfo);
14090 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014091 if (mHdrPlusRawSrcChannel) {
14092 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14093 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014094 }
14095
14096 return rc;
14097}
14098
14099/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014100 * FUNCTION : setInstantAEC
14101 *
14102 * DESCRIPTION: Set Instant AEC related params.
14103 *
14104 * PARAMETERS :
14105 * @meta: CameraMetadata reference
14106 *
14107 * RETURN : NO_ERROR on success
14108 * Error codes on failure
14109 *==========================================================================*/
14110int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14111{
14112 int32_t rc = NO_ERROR;
14113 uint8_t val = 0;
14114 char prop[PROPERTY_VALUE_MAX];
14115
14116 // First try to configure instant AEC from framework metadata
14117 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14118 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14119 }
14120
14121 // If framework did not set this value, try to read from set prop.
14122 if (val == 0) {
14123 memset(prop, 0, sizeof(prop));
14124 property_get("persist.camera.instant.aec", prop, "0");
14125 val = (uint8_t)atoi(prop);
14126 }
14127
14128 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14129 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14130 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14131 mInstantAEC = val;
14132 mInstantAECSettledFrameNumber = 0;
14133 mInstantAecFrameIdxCount = 0;
14134 LOGH("instantAEC value set %d",val);
14135 if (mInstantAEC) {
14136 memset(prop, 0, sizeof(prop));
14137 property_get("persist.camera.ae.instant.bound", prop, "10");
14138 int32_t aec_frame_skip_cnt = atoi(prop);
14139 if (aec_frame_skip_cnt >= 0) {
14140 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14141 } else {
14142 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14143 rc = BAD_VALUE;
14144 }
14145 }
14146 } else {
14147 LOGE("Bad instant aec value set %d", val);
14148 rc = BAD_VALUE;
14149 }
14150 return rc;
14151}
14152
14153/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014154 * FUNCTION : get_num_overall_buffers
14155 *
14156 * DESCRIPTION: Count the total number of pending buffers across all requests.
14157 *
14158 * PARAMETERS : None
14159 *
14160 * RETURN : Number of overall pending buffers
14161 *
14162 *==========================================================================*/
14163uint32_t PendingBuffersMap::get_num_overall_buffers()
14164{
14165 uint32_t sum_buffers = 0;
14166 for (auto &req : mPendingBuffersInRequest) {
14167 sum_buffers += req.mPendingBufferList.size();
14168 }
14169 return sum_buffers;
14170}
14171
14172/*===========================================================================
14173 * FUNCTION : removeBuf
14174 *
14175 * DESCRIPTION: Remove a matching buffer from tracker.
14176 *
14177 * PARAMETERS : @buffer: image buffer for the callback
14178 *
14179 * RETURN : None
14180 *
14181 *==========================================================================*/
14182void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14183{
14184 bool buffer_found = false;
14185 for (auto req = mPendingBuffersInRequest.begin();
14186 req != mPendingBuffersInRequest.end(); req++) {
14187 for (auto k = req->mPendingBufferList.begin();
14188 k != req->mPendingBufferList.end(); k++ ) {
14189 if (k->buffer == buffer) {
14190 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14191 req->frame_number, buffer);
14192 k = req->mPendingBufferList.erase(k);
14193 if (req->mPendingBufferList.empty()) {
14194 // Remove this request from Map
14195 req = mPendingBuffersInRequest.erase(req);
14196 }
14197 buffer_found = true;
14198 break;
14199 }
14200 }
14201 if (buffer_found) {
14202 break;
14203 }
14204 }
14205 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14206 get_num_overall_buffers());
14207}
14208
14209/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014210 * FUNCTION : getBufErrStatus
14211 *
14212 * DESCRIPTION: get buffer error status
14213 *
14214 * PARAMETERS : @buffer: buffer handle
14215 *
14216 * RETURN : Error status
14217 *
14218 *==========================================================================*/
14219int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14220{
14221 for (auto& req : mPendingBuffersInRequest) {
14222 for (auto& k : req.mPendingBufferList) {
14223 if (k.buffer == buffer)
14224 return k.bufStatus;
14225 }
14226 }
14227 return CAMERA3_BUFFER_STATUS_OK;
14228}
14229
14230/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014231 * FUNCTION : setPAAFSupport
14232 *
14233 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14234 * feature mask according to stream type and filter
14235 * arrangement
14236 *
14237 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14238 * @stream_type: stream type
14239 * @filter_arrangement: filter arrangement
14240 *
14241 * RETURN : None
14242 *==========================================================================*/
14243void QCamera3HardwareInterface::setPAAFSupport(
14244 cam_feature_mask_t& feature_mask,
14245 cam_stream_type_t stream_type,
14246 cam_color_filter_arrangement_t filter_arrangement)
14247{
Thierry Strudel3d639192016-09-09 11:52:26 -070014248 switch (filter_arrangement) {
14249 case CAM_FILTER_ARRANGEMENT_RGGB:
14250 case CAM_FILTER_ARRANGEMENT_GRBG:
14251 case CAM_FILTER_ARRANGEMENT_GBRG:
14252 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014253 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14254 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014255 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014256 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14257 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014258 }
14259 break;
14260 case CAM_FILTER_ARRANGEMENT_Y:
14261 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14262 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14263 }
14264 break;
14265 default:
14266 break;
14267 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014268 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14269 feature_mask, stream_type, filter_arrangement);
14270
14271
Thierry Strudel3d639192016-09-09 11:52:26 -070014272}
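/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * Typical call when building a stream's feature mask; the color_arrangement
 * capability field is used here as an assumption, mirroring the
 * adjustBlackLevelForCFA() callers in this HAL.
 *
 *     cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
 *     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *             gCamCapability[mCameraId]->color_arrangement);
 *     // For Bayer filter arrangements the PAAF bit is added unless
 *     // CAM_QTI_FEATURE_PPEISCORE is already present in the mask.
 *==========================================================================*/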
14273
14274/*===========================================================================
14275* FUNCTION : getSensorMountAngle
14276*
14277* DESCRIPTION: Retrieve sensor mount angle
14278*
14279* PARAMETERS : None
14280*
14281* RETURN : sensor mount angle in uint32_t
14282*==========================================================================*/
14283uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14284{
14285 return gCamCapability[mCameraId]->sensor_mount_angle;
14286}
14287
14288/*===========================================================================
14289* FUNCTION : getRelatedCalibrationData
14290*
14291* DESCRIPTION: Retrieve related system calibration data
14292*
14293* PARAMETERS : None
14294*
14295* RETURN : Pointer of related system calibration data
14296*==========================================================================*/
14297const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14298{
14299 return (const cam_related_system_calibration_data_t *)
14300 &(gCamCapability[mCameraId]->related_cam_calibration);
14301}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014302
14303/*===========================================================================
14304 * FUNCTION : is60HzZone
14305 *
14306 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14307 *
14308 * PARAMETERS : None
14309 *
14310 * RETURN : True if in 60Hz zone, False otherwise
14311 *==========================================================================*/
14312bool QCamera3HardwareInterface::is60HzZone()
14313{
14314 time_t t = time(NULL);
14315 struct tm lt;
14316
14317 struct tm* r = localtime_r(&t, &lt);
14318
14319 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14320 return true;
14321 else
14322 return false;
14323}
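/*===========================================================================
 * EXAMPLE (illustrative only, not part of the original source):
 * The heuristic above treats UTC offsets at or below -02:00 (the Americas)
 * and at or above +08:00 (much of East Asia and Oceania) as 60Hz regions:
 *
 *     UTC-05:00 (tm_gmtoff == -18000) -> true  (60Hz)
 *     UTC+01:00 (tm_gmtoff ==   3600) -> false (50Hz)
 *     UTC+09:00 (tm_gmtoff ==  32400) -> true  (60Hz)
 *==========================================================================*/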
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014324
14325/*===========================================================================
14326 * FUNCTION : adjustBlackLevelForCFA
14327 *
14328 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14329 * of bayer CFA (Color Filter Array).
14330 *
14331 * PARAMETERS : @input: black level pattern in the order of RGGB
14332 * @output: black level pattern in the order of CFA
14333 * @color_arrangement: CFA color arrangement
14334 *
14335 * RETURN : None
14336 *==========================================================================*/
14337template<typename T>
14338void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14339 T input[BLACK_LEVEL_PATTERN_CNT],
14340 T output[BLACK_LEVEL_PATTERN_CNT],
14341 cam_color_filter_arrangement_t color_arrangement)
14342{
14343 switch (color_arrangement) {
14344 case CAM_FILTER_ARRANGEMENT_GRBG:
14345 output[0] = input[1];
14346 output[1] = input[0];
14347 output[2] = input[3];
14348 output[3] = input[2];
14349 break;
14350 case CAM_FILTER_ARRANGEMENT_GBRG:
14351 output[0] = input[2];
14352 output[1] = input[3];
14353 output[2] = input[0];
14354 output[3] = input[1];
14355 break;
14356 case CAM_FILTER_ARRANGEMENT_BGGR:
14357 output[0] = input[3];
14358 output[1] = input[2];
14359 output[2] = input[1];
14360 output[3] = input[0];
14361 break;
14362 case CAM_FILTER_ARRANGEMENT_RGGB:
14363 output[0] = input[0];
14364 output[1] = input[1];
14365 output[2] = input[2];
14366 output[3] = input[3];
14367 break;
14368 default:
14369 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14370 break;
14371 }
14372}
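/*===========================================================================
 * EXAMPLE (illustrative only, not part of the original source):
 * Reordering an RGGB black level pattern for a BGGR sensor using the
 * template above:
 *
 *     float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.5f, 64.5f, 65.0f};
 *     float cfa[BLACK_LEVEL_PATTERN_CNT];
 *     adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_BGGR);
 *     // cfa == {65.0f, 64.5f, 64.5f, 64.0f}  (B, Gb, Gr, R order)
 *==========================================================================*/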
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014373
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014374void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14375 CameraMetadata &resultMetadata,
14376 std::shared_ptr<metadata_buffer_t> settings)
14377{
14378 if (settings == nullptr) {
14379 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14380 return;
14381 }
14382
14383 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14384 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14385 }
14386
14387 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14388 String8 str((const char *)gps_methods);
14389 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14390 }
14391
14392 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14393 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14394 }
14395
14396 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14397 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14398 }
14399
14400 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14401 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14402 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14403 }
14404
14405 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14406 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14407 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14408 }
14409
14410 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14411 int32_t fwk_thumb_size[2];
14412 fwk_thumb_size[0] = thumb_size->width;
14413 fwk_thumb_size[1] = thumb_size->height;
14414 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14415 }
14416
14417 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14418 uint8_t fwk_intent = intent[0];
14419 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14420 }
14421}
14422
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014423bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14424 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14425 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014426{
14427 if (hdrPlusRequest == nullptr) return false;
14428
14429 // Check noise reduction mode is high quality.
14430 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14431 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14432 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014433 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14434 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014435 return false;
14436 }
14437
14438 // Check edge mode is high quality.
14439 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14440 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14441 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14442 return false;
14443 }
14444
14445 if (request.num_output_buffers != 1 ||
14446 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14447 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014448 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14449 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14450                     request.output_buffers[i].stream->width,
14451                     request.output_buffers[i].stream->height,
14452                     request.output_buffers[i].stream->format);
14453 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014454 return false;
14455 }
14456
14457 // Get a YUV buffer from pic channel.
14458 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14459 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14460 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14461 if (res != OK) {
14462 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14463 __FUNCTION__, strerror(-res), res);
14464 return false;
14465 }
14466
14467 pbcamera::StreamBuffer buffer;
14468 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014469 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014470 buffer.data = yuvBuffer->buffer;
14471 buffer.dataSize = yuvBuffer->frame_len;
14472
14473 pbcamera::CaptureRequest pbRequest;
14474 pbRequest.id = request.frame_number;
14475 pbRequest.outputBuffers.push_back(buffer);
14476
14477 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014478 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014479 if (res != OK) {
14480 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14481 strerror(-res), res);
14482 return false;
14483 }
14484
14485 hdrPlusRequest->yuvBuffer = yuvBuffer;
14486 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14487
14488 return true;
14489}
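/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the original source):
 * A request qualifies for HDR+ only when noise reduction and edge modes are
 * HIGH_QUALITY and the single output buffer is a BLOB (JPEG) stream:
 *
 *     CameraMetadata meta;
 *     meta = request->settings;   // clones the framework settings
 *     HdrPlusPendingRequest pendingHdrPlus;
 *     bool accepted =
 *             trySubmittingHdrPlusRequestLocked(&pendingHdrPlus, *request, meta);
 *     // When accepted, the YUV buffer and the framework output buffer are
 *     // tracked in pendingHdrPlus until the HDR+ capture result arrives.
 *==========================================================================*/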
14490
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014491status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14492{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014493 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14494 return OK;
14495 }
14496
14497 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14498 if (res != OK) {
14499 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14500 strerror(-res), res);
14501 return res;
14502 }
14503 gHdrPlusClientOpening = true;
14504
14505 return OK;
14506}
14507
Chien-Yu Chenee335912017-02-09 17:53:20 -080014508status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14509{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014510 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014511
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014512 // Check if gHdrPlusClient is opened or being opened.
14513 if (gHdrPlusClient == nullptr) {
14514 if (gHdrPlusClientOpening) {
14515 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14516 return OK;
14517 }
14518
14519 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014520 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014521 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14522 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014523 return res;
14524 }
14525
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014526 // When opening HDR+ client completes, HDR+ mode will be enabled.
14527 return OK;
14528
Chien-Yu Chenee335912017-02-09 17:53:20 -080014529 }
14530
14531 // Configure stream for HDR+.
14532 res = configureHdrPlusStreamsLocked();
14533 if (res != OK) {
14534 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014535 return res;
14536 }
14537
14538 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14539 res = gHdrPlusClient->setZslHdrPlusMode(true);
14540 if (res != OK) {
14541 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014542 return res;
14543 }
14544
14545 mHdrPlusModeEnabled = true;
14546 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14547
14548 return OK;
14549}
14550
14551void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14552{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014553 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014554 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014555 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14556 if (res != OK) {
14557 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14558 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014559
14560 // Close HDR+ client so Easel can enter low power mode.
14561 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14562 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014563 }
14564
14565 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014566 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014567 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14568}
14569
14570status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014571{
14572 pbcamera::InputConfiguration inputConfig;
14573 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14574 status_t res = OK;
14575
14576 // Configure HDR+ client streams.
14577 // Get input config.
14578 if (mHdrPlusRawSrcChannel) {
14579 // HDR+ input buffers will be provided by HAL.
14580 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14581 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14582 if (res != OK) {
14583 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14584 __FUNCTION__, strerror(-res), res);
14585 return res;
14586 }
14587
14588 inputConfig.isSensorInput = false;
14589 } else {
14590 // Sensor MIPI will send data to Easel.
14591 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014592 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014593 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14594 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14595 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14596 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14597 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14598 if (mSensorModeInfo.num_raw_bits != 10) {
14599 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14600 mSensorModeInfo.num_raw_bits);
14601 return BAD_VALUE;
14602 }
14603
14604 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014605 }
14606
14607 // Get output configurations.
14608 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014609 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014610
14611 // Easel may need to output YUV output buffers if mPictureChannel was created.
14612 pbcamera::StreamConfiguration yuvOutputConfig;
14613 if (mPictureChannel != nullptr) {
14614 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14615 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14616 if (res != OK) {
14617             LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14618 __FUNCTION__, strerror(-res), res);
14619
14620 return res;
14621 }
14622
14623 outputStreamConfigs.push_back(yuvOutputConfig);
14624 }
14625
14626 // TODO: consider other channels for YUV output buffers.
14627
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014628 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014629 if (res != OK) {
14630 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14631 strerror(-res), res);
14632 return res;
14633 }
14634
14635 return OK;
14636}
14637
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

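// HDR+ client callback for a failed asynchronous open; logs the error and
// clears the gHdrPlusClientOpening flag so a later enable attempt can retry.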
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

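// HDR+ client callback for an unrecoverable client error; moves the HAL into
// the ERROR state and reports the device error to the framework.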
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

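// HDR+ client callback for a successful capture result. The result is expected
// to carry exactly one YUV output buffer; the HAL patches the result metadata
// with the settings of the original still-capture request, optionally dumps
// the YUV frame, hands the buffer to the picture channel for JPEG encoding,
// forwards the metadata to the framework, and then drops the pending HDR+
// request.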
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                // This should not happen; drop the result if the request is unknown.
                ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request
        // because the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);
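        // The dump can typically be toggled from a host shell on debuggable
        // builds, e.g. "adb shell setprop persist.camera.hdrplus.dump_yuv 1"
        // (setting persist properties may require adb root); frames are then
        // written under QCAMERA_DUMP_FRM_LOCATION as .ppm files.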

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Failed to translate framework metadata to HAL metadata: %s (%d).",
                    __FUNCTION__, strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

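// HDR+ client callback for a failed capture request. Returns the YUV buffer to
// the picture channel, reports every pending buffer of that frame to the
// framework as a buffer error, and removes the corresponding pending request
// bookkeeping.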
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending HDR+ request %d.", __FUNCTION__,
                    failedResult->requestId);
        } else {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel = (QCamera3PicChannel*)
                    pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
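    // Per the camera3 HAL contract, each dropped buffer is reported with a
    // CAMERA3_MSG_ERROR_BUFFER notification and returned in a capture result
    // with CAMERA3_BUFFER_STATUS_ERROR so the framework can reclaim it.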
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

}; //end namespace qcamera