/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
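// Note: METADATA_MAP_SIZE() is a compile-time element count for the lookup
// tables below, e.g. METADATA_MAP_SIZE(EFFECT_MODES_MAP) yields the number of
// entries in EFFECT_MODES_MAP.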

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.
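// Typical access pattern used later in this file (see openCamera()/closeCamera()):
//     Mutex::Autolock l(gHdrPlusClientLock);
//     ... then read/write gEaselManagerClient, gHdrPlusClient, etc.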

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: while mapping from HAL to Android, the
 * code traverses from lower to higher index, which means that for HAL values that map
 * to different Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
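// Example of the ordering rule above: CAM_AWB_D50 appears for D50, DAYLIGHT and
// FINE_WEATHER; a HAL-to-Android lookup that walks this table from index 0 will
// therefore report ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 for CAM_AWB_D50.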

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};
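// These static thunks are the camera3_device_ops entry points the camera
// framework calls on this HAL device; each one forwards to the
// QCamera3HardwareInterface instance stored in camera3_device_t::priv,
// which is set up in the constructor below.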

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
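// Example invocation (see openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
// Logging is a no-op unless gEaselProfilingEnabled is set.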

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}
596/*===========================================================================
597 * FUNCTION : ~QCamera3HardwareInterface
598 *
599 * DESCRIPTION: destructor of QCamera3HardwareInterface
600 *
601 * PARAMETERS : none
602 *
603 * RETURN : none
604 *==========================================================================*/
605QCamera3HardwareInterface::~QCamera3HardwareInterface()
606{
607 LOGD("E");
608
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800609 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700610
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800611 // Disable power hint and enable the perf lock for close camera
612 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
613 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
614
615 // unlink of dualcam during close camera
616 if (mIsDeviceLinked) {
617 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
618 &m_pDualCamCmdPtr->bundle_info;
619 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
620 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
621 pthread_mutex_lock(&gCamLock);
622
623 if (mIsMainCamera == 1) {
624 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
625 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
626 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
627 // related session id should be session id of linked session
628 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
629 } else {
630 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
631 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
632 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
633 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
634 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800635 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800636 pthread_mutex_unlock(&gCamLock);
637
638 rc = mCameraHandle->ops->set_dual_cam_cmd(
639 mCameraHandle->camera_handle);
640 if (rc < 0) {
641 LOGE("Dualcam: Unlink failed, but still proceed to close");
642 }
643 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700644
645 /* We need to stop all streams before deleting any stream */
646 if (mRawDumpChannel) {
647 mRawDumpChannel->stop();
648 }
649
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700650 if (mHdrPlusRawSrcChannel) {
651 mHdrPlusRawSrcChannel->stop();
652 }
653
Thierry Strudel3d639192016-09-09 11:52:26 -0700654 // NOTE: 'camera3_stream_t *' objects are already freed at
655 // this stage by the framework
656 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
657 it != mStreamInfo.end(); it++) {
658 QCamera3ProcessingChannel *channel = (*it)->channel;
659 if (channel) {
660 channel->stop();
661 }
662 }
663 if (mSupportChannel)
664 mSupportChannel->stop();
665
666 if (mAnalysisChannel) {
667 mAnalysisChannel->stop();
668 }
669 if (mMetadataChannel) {
670 mMetadataChannel->stop();
671 }
672 if (mChannelHandle) {
673 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
674 mChannelHandle);
675 LOGD("stopping channel %d", mChannelHandle);
676 }
677
678 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
679 it != mStreamInfo.end(); it++) {
680 QCamera3ProcessingChannel *channel = (*it)->channel;
681 if (channel)
682 delete channel;
683 free (*it);
684 }
685 if (mSupportChannel) {
686 delete mSupportChannel;
687 mSupportChannel = NULL;
688 }
689
690 if (mAnalysisChannel) {
691 delete mAnalysisChannel;
692 mAnalysisChannel = NULL;
693 }
694 if (mRawDumpChannel) {
695 delete mRawDumpChannel;
696 mRawDumpChannel = NULL;
697 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700698 if (mHdrPlusRawSrcChannel) {
699 delete mHdrPlusRawSrcChannel;
700 mHdrPlusRawSrcChannel = NULL;
701 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700702 if (mDummyBatchChannel) {
703 delete mDummyBatchChannel;
704 mDummyBatchChannel = NULL;
705 }
706
707 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800708 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700709
710 if (mMetadataChannel) {
711 delete mMetadataChannel;
712 mMetadataChannel = NULL;
713 }
714
715 /* Clean up all channels */
716 if (mCameraInitialized) {
717 if(!mFirstConfiguration){
718 //send the last unconfigure
719 cam_stream_size_info_t stream_config_info;
720 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
721 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
722 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800723 m_bIs4KVideo ? 0 :
724 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700725 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700726 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
727 stream_config_info);
728 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
729 if (rc < 0) {
730 LOGE("set_parms failed for unconfigure");
731 }
732 }
733 deinitParameters();
734 }
735
736 if (mChannelHandle) {
737 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
738 mChannelHandle);
739 LOGH("deleting channel %d", mChannelHandle);
740 mChannelHandle = 0;
741 }
742
743 if (mState != CLOSED)
744 closeCamera();
745
746 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
747 req.mPendingBufferList.clear();
748 }
749 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700750 for (pendingRequestIterator i = mPendingRequestsList.begin();
751 i != mPendingRequestsList.end();) {
752 i = erasePendingRequest(i);
753 }
754 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
755 if (mDefaultMetadata[i])
756 free_camera_metadata(mDefaultMetadata[i]);
757
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800758 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700759
760 pthread_cond_destroy(&mRequestCond);
761
762 pthread_cond_destroy(&mBuffersCond);
763
764 pthread_mutex_destroy(&mMutex);
765 LOGD("X");
766}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if (rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if (rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configuration matches what is advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                // As per spec, depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. We also have logic to override to full array
                 * size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}
1356
1357/*===========================================================================
1358 * FUNCTION : validateUsageFlagsForEis
1359 *
1360 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1361 *
1362 * PARAMETERS :
1363 * @stream_list : streams to be configured
1364 *
1365 * RETURN :
1366 * NO_ERROR if the usage flags are supported
1367 * error code if usage flags are not supported
1368 *
1369 *==========================================================================*/
1370int QCamera3HardwareInterface::validateUsageFlagsForEis(
1371 const camera3_stream_configuration_t* streamList)
1372{
1373 for (size_t j = 0; j < streamList->num_streams; j++) {
1374 const camera3_stream_t *newStream = streamList->streams[j];
1375
1376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378
1379 // Because EIS is "hard-coded" for certain use cases, and the current
1380 // implementation doesn't support shared preview and video on the same
1381 // stream, return failure if EIS is forced on.
1382 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1383 LOGE("Combined video and preview usage flag is not supported due to EIS");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
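/* Illustrative note (editor's addition): when m_bEisEnable and
 * m_bEisSupportedSize are both set, a single output stream whose gralloc usage
 * satisfies both IS_USAGE_PREVIEW() and IS_USAGE_VIDEO() (i.e. one surface
 * shared for display and encoding) makes configureStreamsPerfLocked() abort
 * with -EINVAL. The exact gralloc bits behind those macros are not restated
 * here; see their definitions.
 */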
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*==============================================================================
1391 * FUNCTION : isSupportChannelNeeded
1392 *
1393 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 * @stream_config_info : the config info for streams to be configured
1398 *
1399 * RETURN : Boolean true/false decision
1400 *
1401 *==========================================================================*/
1402bool QCamera3HardwareInterface::isSupportChannelNeeded(
1403 camera3_stream_configuration_t *streamList,
1404 cam_stream_size_info_t stream_config_info)
1405{
1406 uint32_t i;
1407 bool pprocRequested = false;
1408 /* Check for conditions where PProc pipeline does not have any streams*/
1409 for (i = 0; i < stream_config_info.num_streams; i++) {
1410 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1411 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1412 pprocRequested = true;
1413 break;
1414 }
1415 }
1416
1417 if (pprocRequested == false )
1418 return true;
1419
1420 /* Dummy stream needed if only raw or jpeg streams present */
1421 for (i = 0; i < streamList->num_streams; i++) {
1422 switch(streamList->streams[i]->format) {
1423 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1424 case HAL_PIXEL_FORMAT_RAW10:
1425 case HAL_PIXEL_FORMAT_RAW16:
1426 case HAL_PIXEL_FORMAT_BLOB:
1427 break;
1428 default:
1429 return false;
1430 }
1431 }
1432 return true;
1433}
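/* Illustrative sketch (editor's addition), assuming typical configurations in
 * which at least one stream carries a postprocess feature:
 *   - { BLOB only }                              -> true  (raw/jpeg-only case,
 *                                                    a dummy stream keeps the pipeline fed)
 *   - { RAW16 + BLOB }                           -> true  (same reason)
 *   - { IMPLEMENTATION_DEFINED preview + BLOB }  -> false (preview already feeds PProc)
 * It also returns true when no configured stream requests any postprocess
 * feature (pprocRequested == false).
 */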
1434
1435/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001436 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001437 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
1440 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001441 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001442 *
1443 * RETURN : int32_t type of status
1444 * NO_ERROR -- success
1445 * non-zero failure code
1446 *
1447 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001448int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001449{
1450 int32_t rc = NO_ERROR;
1451
1452 cam_dimension_t max_dim = {0, 0};
1453 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1454 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1455 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1456 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1457 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1458 }
1459
1460 clear_metadata_buffer(mParameters);
1461
1462 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1463 max_dim);
1464 if (rc != NO_ERROR) {
1465 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1466 return rc;
1467 }
1468
1469 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1470 if (rc != NO_ERROR) {
1471 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1472 return rc;
1473 }
1474
1475 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001477
1478 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1479 mParameters);
1480 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 return rc;
1483 }
1484
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001486 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1487 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1488 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1489 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1490 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001491
1492 return rc;
1493}
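/* Illustrative usage sketch (editor's addition): callers are expected to have
 * mStreamConfigInfo populated first, since the maximum stream dimension is
 * what selects the sensor mode:
 *
 *     cam_sensor_mode_info_t sensorModeInfo = {};
 *     rc = getSensorModeInfo(sensorModeInfo);
 *     if (rc == NO_ERROR) {
 *         // e.g. derive frame timing from sensorModeInfo.op_pixel_clk or size
 *         // the RAW path from sensorModeInfo.num_raw_bits
 *     }
 *
 * The uses in the comment above are assumed examples; the struct fields are
 * the ones logged by this function.
 */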
1494
1495/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001496 * FUNCTION : addToPPFeatureMask
1497 *
1498 * DESCRIPTION: add additional features to pp feature mask based on
1499 * stream type and usecase
1500 *
1501 * PARAMETERS :
1502 * @stream_format : stream type for feature mask
1503 * @stream_idx : stream idx within postprocess_mask list to change
1504 *
1505 * RETURN : None
1506 *
1507 *==========================================================================*/
1508void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1509 uint32_t stream_idx)
1510{
1511 char feature_mask_value[PROPERTY_VALUE_MAX];
1512 cam_feature_mask_t feature_mask;
1513 int args_converted;
1514 int property_len;
1515
1516 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001517#ifdef _LE_CAMERA_
1518 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1519 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1520 property_len = property_get("persist.camera.hal3.feature",
1521 feature_mask_value, swtnr_feature_mask_value);
1522#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001523 property_len = property_get("persist.camera.hal3.feature",
1524 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001525#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1527 (feature_mask_value[1] == 'x')) {
1528 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1529 } else {
1530 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1531 }
1532 if (1 != args_converted) {
1533 feature_mask = 0;
1534 LOGE("Wrong feature mask %s", feature_mask_value);
1535 return;
1536 }
1537
1538 switch (stream_format) {
1539 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1540 /* Add SW TNR or LLVD to pp feature mask only if video hint is enabled */
1541 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1542 mStreamConfigInfo.postprocess_mask[stream_idx]
1543 |= CAM_QTI_FEATURE_SW_TNR;
1544 LOGH("Added SW TNR to pp feature mask");
1545 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QCOM_FEATURE_LLVD;
1548 LOGH("Added LLVD SeeMore to pp feature mask");
1549 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001550 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1551 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1552 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1553 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001554 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1557 CAM_QTI_FEATURE_BINNING_CORRECTION;
1558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 break;
1560 }
1561 default:
1562 break;
1563 }
1564 LOGD("PP feature mask %llx",
1565 mStreamConfigInfo.postprocess_mask[stream_idx]);
1566}
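/* Illustrative note (editor's addition): the mask parsed above comes from the
 * persist.camera.hal3.feature property and is accepted in hex or decimal, e.g.
 *
 *     adb shell setprop persist.camera.hal3.feature 0x2000
 *
 * The value 0x2000 is only a placeholder; the real bit values for
 * CAM_QTI_FEATURE_SW_TNR / CAM_QCOM_FEATURE_LLVD are defined by the cam_intf
 * feature-mask headers and are not restated here.
 */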
1567
1568/*==============================================================================
1569 * FUNCTION : updateFpsInPreviewBuffer
1570 *
1571 * DESCRIPTION: update FPS information in preview buffer.
1572 *
1573 * PARAMETERS :
1574 * @metadata : pointer to metadata buffer
1575 * @frame_number: frame_number to look for in pending buffer list
1576 *
1577 * RETURN : None
1578 *
1579 *==========================================================================*/
1580void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1581 uint32_t frame_number)
1582{
1583 // Mark all pending buffers for this particular request
1584 // with corresponding framerate information
1585 for (List<PendingBuffersInRequest>::iterator req =
1586 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1587 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1588 for(List<PendingBufferInfo>::iterator j =
1589 req->mPendingBufferList.begin();
1590 j != req->mPendingBufferList.end(); j++) {
1591 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1592 if ((req->frame_number == frame_number) &&
1593 (channel->getStreamTypeMask() &
1594 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1595 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1596 CAM_INTF_PARM_FPS_RANGE, metadata) {
1597 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1598 struct private_handle_t *priv_handle =
1599 (struct private_handle_t *)(*(j->buffer));
1600 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1601 }
1602 }
1603 }
1604 }
1605}
1606
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001607/*==============================================================================
1608 * FUNCTION : updateTimeStampInPendingBuffers
1609 *
1610 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1611 * of a frame number
1612 *
1613 * PARAMETERS :
1614 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1615 * @timestamp : timestamp to be set
1616 *
1617 * RETURN : None
1618 *
1619 *==========================================================================*/
1620void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1621 uint32_t frameNumber, nsecs_t timestamp)
1622{
1623 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1624 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1625 if (req->frame_number != frameNumber)
1626 continue;
1627
1628 for (auto k = req->mPendingBufferList.begin();
1629 k != req->mPendingBufferList.end(); k++ ) {
1630 struct private_handle_t *priv_handle =
1631 (struct private_handle_t *) (*(k->buffer));
1632 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1633 }
1634 }
1635 return;
1636}
1637
Thierry Strudel3d639192016-09-09 11:52:26 -07001638/*===========================================================================
1639 * FUNCTION : configureStreams
1640 *
1641 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1642 * and output streams.
1643 *
1644 * PARAMETERS :
1645 * @stream_list : streams to be configured
1646 *
1647 * RETURN :
1648 *
1649 *==========================================================================*/
1650int QCamera3HardwareInterface::configureStreams(
1651 camera3_stream_configuration_t *streamList)
1652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001654 int rc = 0;
1655
1656 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001657 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660
1661 return rc;
1662}
1663
1664/*===========================================================================
1665 * FUNCTION : configureStreamsPerfLocked
1666 *
1667 * DESCRIPTION: configureStreams while perfLock is held.
1668 *
1669 * PARAMETERS :
1670 * @stream_list : streams to be configured
1671 *
1672 * RETURN : int32_t type of status
1673 * NO_ERROR -- success
1674 * non-zero failure code
1675 *==========================================================================*/
1676int QCamera3HardwareInterface::configureStreamsPerfLocked(
1677 camera3_stream_configuration_t *streamList)
1678{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001679 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001680 int rc = 0;
1681
1682 // Sanity check stream_list
1683 if (streamList == NULL) {
1684 LOGE("NULL stream configuration");
1685 return BAD_VALUE;
1686 }
1687 if (streamList->streams == NULL) {
1688 LOGE("NULL stream list");
1689 return BAD_VALUE;
1690 }
1691
1692 if (streamList->num_streams < 1) {
1693 LOGE("Bad number of streams requested: %d",
1694 streamList->num_streams);
1695 return BAD_VALUE;
1696 }
1697
1698 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1699 LOGE("Maximum number of streams %d exceeded: %d",
1700 MAX_NUM_STREAMS, streamList->num_streams);
1701 return BAD_VALUE;
1702 }
1703
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001704 rc = validateUsageFlags(streamList);
1705 if (rc != NO_ERROR) {
1706 return rc;
1707 }
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 mOpMode = streamList->operation_mode;
1710 LOGD("mOpMode: %d", mOpMode);
1711
1712 /* first invalidate all the streams in mStreamInfo
1713 * if they appear again, they will be validated */
1714 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1715 it != mStreamInfo.end(); it++) {
1716 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1717 if (channel) {
1718 channel->stop();
1719 }
1720 (*it)->status = INVALID;
1721 }
1722
1723 if (mRawDumpChannel) {
1724 mRawDumpChannel->stop();
1725 delete mRawDumpChannel;
1726 mRawDumpChannel = NULL;
1727 }
1728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001729 if (mHdrPlusRawSrcChannel) {
1730 mHdrPlusRawSrcChannel->stop();
1731 delete mHdrPlusRawSrcChannel;
1732 mHdrPlusRawSrcChannel = NULL;
1733 }
1734
Thierry Strudel3d639192016-09-09 11:52:26 -07001735 if (mSupportChannel)
1736 mSupportChannel->stop();
1737
1738 if (mAnalysisChannel) {
1739 mAnalysisChannel->stop();
1740 }
1741 if (mMetadataChannel) {
1742 /* If mStreamInfo is not empty, there is a metadata stream */
1743 mMetadataChannel->stop();
1744 }
1745 if (mChannelHandle) {
1746 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1747 mChannelHandle);
1748 LOGD("stopping channel %d", mChannelHandle);
1749 }
1750
1751 pthread_mutex_lock(&mMutex);
1752
1753 // Check state
1754 switch (mState) {
1755 case INITIALIZED:
1756 case CONFIGURED:
1757 case STARTED:
1758 /* valid state */
1759 break;
1760 default:
1761 LOGE("Invalid state %d", mState);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENODEV;
1764 }
1765
1766 /* Check whether we have video stream */
1767 m_bIs4KVideo = false;
1768 m_bIsVideo = false;
1769 m_bEisSupportedSize = false;
1770 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001771 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001772 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001773 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001774 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001775 uint32_t videoWidth = 0U;
1776 uint32_t videoHeight = 0U;
1777 size_t rawStreamCnt = 0;
1778 size_t stallStreamCnt = 0;
1779 size_t processedStreamCnt = 0;
1780 // Number of streams on ISP encoder path
1781 size_t numStreamsOnEncoder = 0;
1782 size_t numYuv888OnEncoder = 0;
1783 bool bYuv888OverrideJpeg = false;
1784 cam_dimension_t largeYuv888Size = {0, 0};
1785 cam_dimension_t maxViewfinderSize = {0, 0};
1786 bool bJpegExceeds4K = false;
1787 bool bJpegOnEncoder = false;
1788 bool bUseCommonFeatureMask = false;
1789 cam_feature_mask_t commonFeatureMask = 0;
1790 bool bSmallJpegSize = false;
1791 uint32_t width_ratio;
1792 uint32_t height_ratio;
1793 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1794 camera3_stream_t *inputStream = NULL;
1795 bool isJpeg = false;
1796 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001797 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001798 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001799
1800 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1801
1802 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 uint8_t eis_prop_set;
1804 uint32_t maxEisWidth = 0;
1805 uint32_t maxEisHeight = 0;
1806
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001807 // Initialize all instant AEC related variables
1808 mInstantAEC = false;
1809 mResetInstantAEC = false;
1810 mInstantAECSettledFrameNumber = 0;
1811 mAecSkipDisplayFrameBound = 0;
1812 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001813 mCurrFeatureState = 0;
1814 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001815
Thierry Strudel3d639192016-09-09 11:52:26 -07001816 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1817
1818 size_t count = IS_TYPE_MAX;
1819 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1820 for (size_t i = 0; i < count; i++) {
1821 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001822 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1823 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 break;
1825 }
1826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001827
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001828 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001829 maxEisWidth = MAX_EIS_WIDTH;
1830 maxEisHeight = MAX_EIS_HEIGHT;
1831 }
1832
1833 /* EIS setprop control */
1834 char eis_prop[PROPERTY_VALUE_MAX];
1835 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001836 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 eis_prop_set = (uint8_t)atoi(eis_prop);
1838
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001839 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001840 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1841
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001842 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1843 m_bEisEnable, eis_prop_set, m_bEisSupported);
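// Illustrative note (editor's addition): EIS can be disabled at runtime with
//     adb shell setprop persist.camera.eis.enable 0
// (the default is "1"). Even when enabled here, it is turned off again below
// for front / front-aux sensors and for configurations without a video stream.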
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001844
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 /* stream configurations */
1846 for (size_t i = 0; i < streamList->num_streams; i++) {
1847 camera3_stream_t *newStream = streamList->streams[i];
1848 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1849 "height = %d, rotation = %d, usage = 0x%x",
1850 i, newStream->stream_type, newStream->format,
1851 newStream->width, newStream->height, newStream->rotation,
1852 newStream->usage);
1853 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1854 newStream->stream_type == CAMERA3_STREAM_INPUT){
1855 isZsl = true;
1856 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001857 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1858 IS_USAGE_PREVIEW(newStream->usage)) {
1859 isPreview = true;
1860 }
1861
Thierry Strudel3d639192016-09-09 11:52:26 -07001862 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1863 inputStream = newStream;
1864 }
1865
Emilian Peev7650c122017-01-19 08:24:33 -08001866 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1867 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 isJpeg = true;
1869 jpegSize.width = newStream->width;
1870 jpegSize.height = newStream->height;
1871 if (newStream->width > VIDEO_4K_WIDTH ||
1872 newStream->height > VIDEO_4K_HEIGHT)
1873 bJpegExceeds4K = true;
1874 }
1875
1876 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1877 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1878 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001879 // In HAL3 we can have multiple different video streams.
1880 // The variables video width and height are used below as
1881 // dimensions of the biggest of them
1882 if (videoWidth < newStream->width ||
1883 videoHeight < newStream->height) {
1884 videoWidth = newStream->width;
1885 videoHeight = newStream->height;
1886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1888 (VIDEO_4K_HEIGHT <= newStream->height)) {
1889 m_bIs4KVideo = true;
1890 }
1891 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1892 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001893
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 }
1895 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1896 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1897 switch (newStream->format) {
1898 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001899 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1900 depthPresent = true;
1901 break;
1902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 stallStreamCnt++;
1904 if (isOnEncoder(maxViewfinderSize, newStream->width,
1905 newStream->height)) {
1906 numStreamsOnEncoder++;
1907 bJpegOnEncoder = true;
1908 }
1909 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1910 newStream->width);
1911 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1912 newStream->height);
1913 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1914 "FATAL: max_downscale_factor cannot be zero and so assert");
1915 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1916 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1917 LOGH("Setting small jpeg size flag to true");
1918 bSmallJpegSize = true;
1919 }
1920 break;
1921 case HAL_PIXEL_FORMAT_RAW10:
1922 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1923 case HAL_PIXEL_FORMAT_RAW16:
1924 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001925 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1926 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1927 pdStatCount++;
1928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 break;
1930 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1931 processedStreamCnt++;
1932 if (isOnEncoder(maxViewfinderSize, newStream->width,
1933 newStream->height)) {
1934 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1935 !IS_USAGE_ZSL(newStream->usage)) {
1936 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1937 }
1938 numStreamsOnEncoder++;
1939 }
1940 break;
1941 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1942 processedStreamCnt++;
1943 if (isOnEncoder(maxViewfinderSize, newStream->width,
1944 newStream->height)) {
1945 // If Yuv888 size is not greater than 4K, set feature mask
1946 // to SUPERSET so that it supports concurrent requests on
1947 // YUV and JPEG.
1948 if (newStream->width <= VIDEO_4K_WIDTH &&
1949 newStream->height <= VIDEO_4K_HEIGHT) {
1950 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1951 }
1952 numStreamsOnEncoder++;
1953 numYuv888OnEncoder++;
1954 largeYuv888Size.width = newStream->width;
1955 largeYuv888Size.height = newStream->height;
1956 }
1957 break;
1958 default:
1959 processedStreamCnt++;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width,
1961 newStream->height)) {
1962 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1963 numStreamsOnEncoder++;
1964 }
1965 break;
1966 }
1967
1968 }
1969 }
1970
1971 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1972 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1973 !m_bIsVideo) {
1974 m_bEisEnable = false;
1975 }
1976
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1978 pthread_mutex_unlock(&mMutex);
1979 return -EINVAL;
1980 }
1981
Thierry Strudel54dc9782017-02-15 12:12:10 -08001982 uint8_t forceEnableTnr = 0;
1983 char tnr_prop[PROPERTY_VALUE_MAX];
1984 memset(tnr_prop, 0, sizeof(tnr_prop));
1985 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1986 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1987
Thierry Strudel3d639192016-09-09 11:52:26 -07001988 /* Logic to enable/disable TNR based on specific config size/etc.*/
1989 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1991 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001992 else if (forceEnableTnr)
1993 m_bTnrEnabled = true;
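// Illustrative note (editor's addition): TNR can be forced on for bring-up with
//     adb shell setprop debug.camera.tnr.forceenable 1
// which sets m_bTnrEnabled regardless of the m_bTnrPreview / m_bTnrVideo
// conditions checked above.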
Thierry Strudel3d639192016-09-09 11:52:26 -07001994
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001995 char videoHdrProp[PROPERTY_VALUE_MAX];
1996 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1997 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1998 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1999
2000 if (hdr_mode_prop == 1 && m_bIsVideo &&
2001 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2002 m_bVideoHdrEnabled = true;
2003 else
2004 m_bVideoHdrEnabled = false;
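// Illustrative note (editor's addition): video HDR is gated on
//     adb shell setprop persist.camera.hdr.video 1
// and only takes effect when a video stream is configured and the session is
// not in constrained high-speed (HFR) mode.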
2005
2006
Thierry Strudel3d639192016-09-09 11:52:26 -07002007 /* Check if num_streams is sane */
2008 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2009 rawStreamCnt > MAX_RAW_STREAMS ||
2010 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2011 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2012 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2013 pthread_mutex_unlock(&mMutex);
2014 return -EINVAL;
2015 }
2016 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002017 if (isZsl && m_bIs4KVideo) {
2018 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002019 pthread_mutex_unlock(&mMutex);
2020 return -EINVAL;
2021 }
2022 /* Check if stream sizes are sane */
2023 if (numStreamsOnEncoder > 2) {
2024 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2025 pthread_mutex_unlock(&mMutex);
2026 return -EINVAL;
2027 } else if (1 < numStreamsOnEncoder){
2028 bUseCommonFeatureMask = true;
2029 LOGH("Multiple streams above max viewfinder size, common mask needed");
2030 }
2031
2032 /* Check if BLOB size is greater than 4k in 4k recording case */
2033 if (m_bIs4KVideo && bJpegExceeds4K) {
2034 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2035 pthread_mutex_unlock(&mMutex);
2036 return -EINVAL;
2037 }
2038
Emilian Peev7650c122017-01-19 08:24:33 -08002039 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2040 depthPresent) {
2041 LOGE("HAL doesn't support depth streams in HFR mode!");
2042 pthread_mutex_unlock(&mMutex);
2043 return -EINVAL;
2044 }
2045
Thierry Strudel3d639192016-09-09 11:52:26 -07002046 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2047 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2048 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2049 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2050 // configurations:
2051 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2052 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2053 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2054 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2055 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2056 __func__);
2057 pthread_mutex_unlock(&mMutex);
2058 return -EINVAL;
2059 }
2060
2061 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2062 // the YUV stream's size is greater than the JPEG size, set common
2063 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2064 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2065 jpegSize.width, jpegSize.height) &&
2066 largeYuv888Size.width > jpegSize.width &&
2067 largeYuv888Size.height > jpegSize.height) {
2068 bYuv888OverrideJpeg = true;
2069 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2070 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2071 }
2072
2073 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2074 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2075 commonFeatureMask);
2076 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2077 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2078
2079 rc = validateStreamDimensions(streamList);
2080 if (rc == NO_ERROR) {
2081 rc = validateStreamRotations(streamList);
2082 }
2083 if (rc != NO_ERROR) {
2084 LOGE("Invalid stream configuration requested!");
2085 pthread_mutex_unlock(&mMutex);
2086 return rc;
2087 }
2088
Emilian Peev0f3c3162017-03-15 12:57:46 +00002089 if (1 < pdStatCount) {
2090 LOGE("HAL doesn't support multiple PD streams");
2091 pthread_mutex_unlock(&mMutex);
2092 return -EINVAL;
2093 }
2094
2095 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2096 (1 == pdStatCount)) {
2097 LOGE("HAL doesn't support PD streams in HFR mode!");
2098 pthread_mutex_unlock(&mMutex);
2099 return -EINVAL;
2100 }
2101
Thierry Strudel3d639192016-09-09 11:52:26 -07002102 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2103 for (size_t i = 0; i < streamList->num_streams; i++) {
2104 camera3_stream_t *newStream = streamList->streams[i];
2105 LOGH("newStream type = %d, stream format = %d "
2106 "stream size : %d x %d, stream rotation = %d",
2107 newStream->stream_type, newStream->format,
2108 newStream->width, newStream->height, newStream->rotation);
2109 //if the stream is already in mStreamInfo, validate it
2110 bool stream_exists = false;
2111 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2112 it != mStreamInfo.end(); it++) {
2113 if ((*it)->stream == newStream) {
2114 QCamera3ProcessingChannel *channel =
2115 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2116 stream_exists = true;
2117 if (channel)
2118 delete channel;
2119 (*it)->status = VALID;
2120 (*it)->stream->priv = NULL;
2121 (*it)->channel = NULL;
2122 }
2123 }
2124 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2125 //new stream
2126 stream_info_t* stream_info;
2127 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2128 if (!stream_info) {
2129 LOGE("Could not allocate stream info");
2130 rc = -ENOMEM;
2131 pthread_mutex_unlock(&mMutex);
2132 return rc;
2133 }
2134 stream_info->stream = newStream;
2135 stream_info->status = VALID;
2136 stream_info->channel = NULL;
2137 mStreamInfo.push_back(stream_info);
2138 }
2139 /* Covers Opaque ZSL and API1 F/W ZSL */
2140 if (IS_USAGE_ZSL(newStream->usage)
2141 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2142 if (zslStream != NULL) {
2143 LOGE("Multiple input/reprocess streams requested!");
2144 pthread_mutex_unlock(&mMutex);
2145 return BAD_VALUE;
2146 }
2147 zslStream = newStream;
2148 }
2149 /* Covers YUV reprocess */
2150 if (inputStream != NULL) {
2151 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2152 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2153 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->width == newStream->width
2155 && inputStream->height == newStream->height) {
2156 if (zslStream != NULL) {
2157 /* This scenario indicates that multiple YUV streams with the same size
2158 * as the input stream have been requested. Since the zsl stream handle
2159 * is solely used for overriding the size of streams that share h/w
2160 * streams, we just make a guess here as to which of the streams is the
2161 * ZSL stream. This will be refactored once we have generic logic for
2162 * streams sharing encoder output.
2163 */
2164 LOGH("Warning, Multiple ip/reprocess streams requested!");
2165 }
2166 zslStream = newStream;
2167 }
2168 }
2169 }
2170
2171 /* If a zsl stream is set, we know that we have configured at least one input or
2172 bidirectional stream */
2173 if (NULL != zslStream) {
2174 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2175 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2176 mInputStreamInfo.format = zslStream->format;
2177 mInputStreamInfo.usage = zslStream->usage;
2178 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2179 mInputStreamInfo.dim.width,
2180 mInputStreamInfo.dim.height,
2181 mInputStreamInfo.format, mInputStreamInfo.usage);
2182 }
2183
2184 cleanAndSortStreamInfo();
2185 if (mMetadataChannel) {
2186 delete mMetadataChannel;
2187 mMetadataChannel = NULL;
2188 }
2189 if (mSupportChannel) {
2190 delete mSupportChannel;
2191 mSupportChannel = NULL;
2192 }
2193
2194 if (mAnalysisChannel) {
2195 delete mAnalysisChannel;
2196 mAnalysisChannel = NULL;
2197 }
2198
2199 if (mDummyBatchChannel) {
2200 delete mDummyBatchChannel;
2201 mDummyBatchChannel = NULL;
2202 }
2203
Emilian Peev7650c122017-01-19 08:24:33 -08002204 if (mDepthChannel) {
2205 mDepthChannel = NULL;
2206 }
2207
Thierry Strudel2896d122017-02-23 19:18:03 -08002208 char is_type_value[PROPERTY_VALUE_MAX];
2209 property_get("persist.camera.is_type", is_type_value, "4");
2210 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
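// Illustrative note (editor's addition): persist.camera.is_type selects the
// image-stabilization type; m_bEis3PropertyEnabled is set only when the value
// matches IS_TYPE_EIS_3_0, which gates the EIS 3.0 extras below (the
// CAM_QTI_FEATURE_PPEISCORE bit and the larger MAX_VIDEO_BUFFERS allocation).
// Whether the default "4" maps to IS_TYPE_EIS_3_0 depends on the cam_is_type_t
// enum and is not assumed here.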
2211
Thierry Strudel3d639192016-09-09 11:52:26 -07002212 //Create metadata channel and initialize it
2213 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2214 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2215 gCamCapability[mCameraId]->color_arrangement);
2216 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2217 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002218 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002219 if (mMetadataChannel == NULL) {
2220 LOGE("failed to allocate metadata channel");
2221 rc = -ENOMEM;
2222 pthread_mutex_unlock(&mMutex);
2223 return rc;
2224 }
2225 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2226 if (rc < 0) {
2227 LOGE("metadata channel initialization failed");
2228 delete mMetadataChannel;
2229 mMetadataChannel = NULL;
2230 pthread_mutex_unlock(&mMutex);
2231 return rc;
2232 }
2233
Thierry Strudel2896d122017-02-23 19:18:03 -08002234 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002235 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002236 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002237 // Keep track of preview/video streams indices.
2238 // There could be more than one preview streams, but only one video stream.
2239 int32_t video_stream_idx = -1;
2240 int32_t preview_stream_idx[streamList->num_streams];
2241 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002242 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2243 /* Allocate channel objects for the requested streams */
2244 for (size_t i = 0; i < streamList->num_streams; i++) {
2245 camera3_stream_t *newStream = streamList->streams[i];
2246 uint32_t stream_usage = newStream->usage;
2247 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2248 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2249 struct camera_info *p_info = NULL;
2250 pthread_mutex_lock(&gCamLock);
2251 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2252 pthread_mutex_unlock(&gCamLock);
2253 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2254 || IS_USAGE_ZSL(newStream->usage)) &&
2255 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002256 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002257 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002258 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2259 if (bUseCommonFeatureMask)
2260 zsl_ppmask = commonFeatureMask;
2261 else
2262 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002263 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002264 if (numStreamsOnEncoder > 0)
2265 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2266 else
2267 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002268 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002270 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002271 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 LOGH("Input stream configured, reprocess config");
2273 } else {
2274 //for non zsl streams find out the format
2275 switch (newStream->format) {
2276 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2277 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002278 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002279 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2280 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2281 /* add additional features to pp feature mask */
2282 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2283 mStreamConfigInfo.num_streams);
2284
2285 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2286 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2287 CAM_STREAM_TYPE_VIDEO;
2288 if (m_bTnrEnabled && m_bTnrVideo) {
2289 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2290 CAM_QCOM_FEATURE_CPP_TNR;
2291 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2292 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2293 ~CAM_QCOM_FEATURE_CDS;
2294 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002295 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2296 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2297 CAM_QTI_FEATURE_PPEISCORE;
2298 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002299 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002300 } else {
2301 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2302 CAM_STREAM_TYPE_PREVIEW;
2303 if (m_bTnrEnabled && m_bTnrPreview) {
2304 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2305 CAM_QCOM_FEATURE_CPP_TNR;
2306 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2307 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2308 ~CAM_QCOM_FEATURE_CDS;
2309 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002310 if(!m_bSwTnrPreview) {
2311 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2312 ~CAM_QTI_FEATURE_SW_TNR;
2313 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002314 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 padding_info.width_padding = mSurfaceStridePadding;
2316 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002317 previewSize.width = (int32_t)newStream->width;
2318 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002319 }
2320 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2321 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2322 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2323 newStream->height;
2324 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2325 newStream->width;
2326 }
2327 }
2328 break;
2329 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002331 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2332 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2333 if (bUseCommonFeatureMask)
2334 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2335 commonFeatureMask;
2336 else
2337 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2338 CAM_QCOM_FEATURE_NONE;
2339 } else {
2340 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2341 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2342 }
2343 break;
2344 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002345 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002346 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2347 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2348 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2349 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2350 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 /* Remove rotation if it is not supported
2352 for 4K LiveVideo snapshot case (online processing) */
2353 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2354 CAM_QCOM_FEATURE_ROTATION)) {
2355 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2356 &= ~CAM_QCOM_FEATURE_ROTATION;
2357 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 } else {
2359 if (bUseCommonFeatureMask &&
2360 isOnEncoder(maxViewfinderSize, newStream->width,
2361 newStream->height)) {
2362 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2363 } else {
2364 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2365 }
2366 }
2367 if (isZsl) {
2368 if (zslStream) {
2369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2370 (int32_t)zslStream->width;
2371 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2372 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002373 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2374 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 } else {
2376 LOGE("Error, No ZSL stream identified");
2377 pthread_mutex_unlock(&mMutex);
2378 return -EINVAL;
2379 }
2380 } else if (m_bIs4KVideo) {
2381 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2383 } else if (bYuv888OverrideJpeg) {
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2385 (int32_t)largeYuv888Size.width;
2386 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2387 (int32_t)largeYuv888Size.height;
2388 }
2389 break;
2390 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2391 case HAL_PIXEL_FORMAT_RAW16:
2392 case HAL_PIXEL_FORMAT_RAW10:
2393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2394 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2395 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002396 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2397 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2398 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2399 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2400 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2401 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2402 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2403 gCamCapability[mCameraId]->dt[mPDIndex];
2404 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2405 gCamCapability[mCameraId]->vc[mPDIndex];
2406 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 break;
2408 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002409 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002410 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2412 break;
2413 }
2414 }
2415
2416 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2417 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2418 gCamCapability[mCameraId]->color_arrangement);
2419
2420 if (newStream->priv == NULL) {
2421 //New stream, construct channel
2422 switch (newStream->stream_type) {
2423 case CAMERA3_STREAM_INPUT:
2424 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2425 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2426 break;
2427 case CAMERA3_STREAM_BIDIRECTIONAL:
2428 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2429 GRALLOC_USAGE_HW_CAMERA_WRITE;
2430 break;
2431 case CAMERA3_STREAM_OUTPUT:
2432 /* For video encoding stream, set read/write rarely
2433 * flag so that they may be set to un-cached */
2434 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2435 newStream->usage |=
2436 (GRALLOC_USAGE_SW_READ_RARELY |
2437 GRALLOC_USAGE_SW_WRITE_RARELY |
2438 GRALLOC_USAGE_HW_CAMERA_WRITE);
2439 else if (IS_USAGE_ZSL(newStream->usage))
2440 {
2441 LOGD("ZSL usage flag skipping");
2442 }
2443 else if (newStream == zslStream
2444 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2445 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2446 } else
2447 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2448 break;
2449 default:
2450 LOGE("Invalid stream_type %d", newStream->stream_type);
2451 break;
2452 }
2453
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002454 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2456 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2457 QCamera3ProcessingChannel *channel = NULL;
2458 switch (newStream->format) {
2459 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2460 if ((newStream->usage &
2461 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2462 (streamList->operation_mode ==
2463 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2464 ) {
2465 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2466 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002467 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002468 this,
2469 newStream,
2470 (cam_stream_type_t)
2471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2473 mMetadataChannel,
2474 0); //heap buffers are not required for HFR video channel
2475 if (channel == NULL) {
2476 LOGE("allocation of channel failed");
2477 pthread_mutex_unlock(&mMutex);
2478 return -ENOMEM;
2479 }
2480 //channel->getNumBuffers() will return 0 here so use
2481 //MAX_INFLIGHT_HFR_REQUESTS
2482 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2483 newStream->priv = channel;
2484 LOGI("num video buffers in HFR mode: %d",
2485 MAX_INFLIGHT_HFR_REQUESTS);
2486 } else {
2487 /* Copy stream contents in HFR preview only case to create
2488 * dummy batch channel so that sensor streaming is in
2489 * HFR mode */
2490 if (!m_bIsVideo && (streamList->operation_mode ==
2491 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2492 mDummyBatchStream = *newStream;
2493 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 int bufferCount = MAX_INFLIGHT_REQUESTS;
2495 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2496 CAM_STREAM_TYPE_VIDEO) {
2497 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2498 bufferCount = MAX_VIDEO_BUFFERS;
2499 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2501 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002502 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 this,
2504 newStream,
2505 (cam_stream_type_t)
2506 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2508 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 if (channel == NULL) {
2511 LOGE("allocation of channel failed");
2512 pthread_mutex_unlock(&mMutex);
2513 return -ENOMEM;
2514 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* disable UBWC for preview, though supported,
2516 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002517 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002518 (previewSize.width == (int32_t)videoWidth)&&
2519 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002520 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002521 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002522 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002523 newStream->max_buffers = channel->getNumBuffers();
2524 newStream->priv = channel;
2525 }
2526 break;
2527 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2528 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2529 mChannelHandle,
2530 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002531 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 this,
2533 newStream,
2534 (cam_stream_type_t)
2535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2537 mMetadataChannel);
2538 if (channel == NULL) {
2539 LOGE("allocation of YUV channel failed");
2540 pthread_mutex_unlock(&mMutex);
2541 return -ENOMEM;
2542 }
2543 newStream->max_buffers = channel->getNumBuffers();
2544 newStream->priv = channel;
2545 break;
2546 }
2547 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2548 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002549 case HAL_PIXEL_FORMAT_RAW10: {
2550 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2551 (HAL_DATASPACE_DEPTH != newStream->data_space))
2552 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 mRawChannel = new QCamera3RawChannel(
2554 mCameraHandle->camera_handle, mChannelHandle,
2555 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002556 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 this, newStream,
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002559 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002560 if (mRawChannel == NULL) {
2561 LOGE("allocation of raw channel failed");
2562 pthread_mutex_unlock(&mMutex);
2563 return -ENOMEM;
2564 }
2565 newStream->max_buffers = mRawChannel->getNumBuffers();
2566 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2567 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002568 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002569 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002570 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2571 mDepthChannel = new QCamera3DepthChannel(
2572 mCameraHandle->camera_handle, mChannelHandle,
2573 mCameraHandle->ops, NULL, NULL, &padding_info,
2574 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2575 mMetadataChannel);
2576 if (NULL == mDepthChannel) {
2577 LOGE("Allocation of depth channel failed");
2578 pthread_mutex_unlock(&mMutex);
2579 return NO_MEMORY;
2580 }
2581 newStream->priv = mDepthChannel;
2582 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2583 } else {
2584 // Max live snapshot inflight buffer is 1. This is to mitigate
2585 // frame drop issues for video snapshot. The more buffers being
2586 // allocated, the more frame drops there are.
2587 mPictureChannel = new QCamera3PicChannel(
2588 mCameraHandle->camera_handle, mChannelHandle,
2589 mCameraHandle->ops, captureResultCb,
2590 setBufferErrorStatus, &padding_info, this, newStream,
2591 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2592 m_bIs4KVideo, isZsl, mMetadataChannel,
2593 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2594 if (mPictureChannel == NULL) {
2595 LOGE("allocation of channel failed");
2596 pthread_mutex_unlock(&mMutex);
2597 return -ENOMEM;
2598 }
2599 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2600 newStream->max_buffers = mPictureChannel->getNumBuffers();
2601 mPictureChannel->overrideYuvSize(
2602 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2603 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002604 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 break;
2606
2607 default:
2608 LOGE("not a supported format 0x%x", newStream->format);
2609 break;
2610 }
2611 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2612 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2613 } else {
2614 LOGE("Error, Unknown stream type");
2615 pthread_mutex_unlock(&mMutex);
2616 return -EINVAL;
2617 }
2618
2619 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002620 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2621 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002622 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002623 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2625 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2626 }
2627 }
2628
2629 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2630 it != mStreamInfo.end(); it++) {
2631 if ((*it)->stream == newStream) {
2632 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2633 break;
2634 }
2635 }
2636 } else {
2637 // Channel already exists for this stream
2638 // Do nothing for now
2639 }
2640 padding_info = gCamCapability[mCameraId]->padding_info;
2641
Emilian Peev7650c122017-01-19 08:24:33 -08002642 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 * since there is no real stream associated with them
2644 */
Emilian Peev7650c122017-01-19 08:24:33 -08002645 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002646 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2647 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002648 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 }
2651
Binhao Lincdb362a2017-04-20 13:31:54 -07002652 // By default, preview stream TNR is disabled.
2653    // Enable TNR for the preview stream if all conditions below are satisfied:
2654    // 1. resolution <= 1080p.
2655    // 2. preview resolution == video resolution.
2656    // 3. video stream TNR is enabled.
2657    // 4. EIS 2.0 is in use.
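    // e.g., a 1920x1080 preview paired with a 1920x1080 TNR-enabled video stream under
    // EIS 2.0 gets CPP TNR added to its feature mask (and CDS cleared); a 3840x2160 pair
    // does not qualify (illustrative sizes only).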
2658 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2659 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2660 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2661 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2662 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2663 video_stream->width == preview_stream->width &&
2664 video_stream->height == preview_stream->height) {
2665 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2666 CAM_QCOM_FEATURE_CPP_TNR;
2667 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2668 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2669 ~CAM_QCOM_FEATURE_CDS;
2670 }
2671 }
2672
Thierry Strudel2896d122017-02-23 19:18:03 -08002673 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2674 onlyRaw = false;
2675 }
2676
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002677 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002678 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002679 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002680 cam_analysis_info_t analysisInfo;
2681 int32_t ret = NO_ERROR;
2682 ret = mCommon.getAnalysisInfo(
2683 FALSE,
2684 analysisFeatureMask,
2685 &analysisInfo);
2686 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002687 cam_color_filter_arrangement_t analysis_color_arrangement =
2688 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2689 CAM_FILTER_ARRANGEMENT_Y :
2690 gCamCapability[mCameraId]->color_arrangement);
2691 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2692 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002693 cam_dimension_t analysisDim;
2694 analysisDim = mCommon.getMatchingDimension(previewSize,
2695 analysisInfo.analysis_recommended_res);
2696
2697 mAnalysisChannel = new QCamera3SupportChannel(
2698 mCameraHandle->camera_handle,
2699 mChannelHandle,
2700 mCameraHandle->ops,
2701 &analysisInfo.analysis_padding_info,
2702 analysisFeatureMask,
2703 CAM_STREAM_TYPE_ANALYSIS,
2704 &analysisDim,
2705 (analysisInfo.analysis_format
2706 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2707 : CAM_FORMAT_YUV_420_NV21),
2708 analysisInfo.hw_analysis_supported,
2709 gCamCapability[mCameraId]->color_arrangement,
2710 this,
2711 0); // force buffer count to 0
2712 } else {
2713 LOGW("getAnalysisInfo failed, ret = %d", ret);
2714 }
2715 if (!mAnalysisChannel) {
2716 LOGW("Analysis channel cannot be created");
2717 }
2718 }
2719
Thierry Strudel3d639192016-09-09 11:52:26 -07002720 //RAW DUMP channel
2721 if (mEnableRawDump && isRawStreamRequested == false){
2722 cam_dimension_t rawDumpSize;
2723 rawDumpSize = getMaxRawSize(mCameraId);
2724 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2725 setPAAFSupport(rawDumpFeatureMask,
2726 CAM_STREAM_TYPE_RAW,
2727 gCamCapability[mCameraId]->color_arrangement);
2728 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2729 mChannelHandle,
2730 mCameraHandle->ops,
2731 rawDumpSize,
2732 &padding_info,
2733 this, rawDumpFeatureMask);
2734 if (!mRawDumpChannel) {
2735 LOGE("Raw Dump channel cannot be created");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 }
2740
Thierry Strudel3d639192016-09-09 11:52:26 -07002741 if (mAnalysisChannel) {
2742 cam_analysis_info_t analysisInfo;
2743 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2744 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2745 CAM_STREAM_TYPE_ANALYSIS;
2746 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2747 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002748 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002749 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2750 &analysisInfo);
2751 if (rc != NO_ERROR) {
2752 LOGE("getAnalysisInfo failed, ret = %d", rc);
2753 pthread_mutex_unlock(&mMutex);
2754 return rc;
2755 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002756 cam_color_filter_arrangement_t analysis_color_arrangement =
2757 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2758 CAM_FILTER_ARRANGEMENT_Y :
2759 gCamCapability[mCameraId]->color_arrangement);
2760 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2761 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2762 analysis_color_arrangement);
2763
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002765 mCommon.getMatchingDimension(previewSize,
2766 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002767 mStreamConfigInfo.num_streams++;
2768 }
2769
Thierry Strudel2896d122017-02-23 19:18:03 -08002770 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002771 cam_analysis_info_t supportInfo;
2772 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2773 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2774 setPAAFSupport(callbackFeatureMask,
2775 CAM_STREAM_TYPE_CALLBACK,
2776 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002777 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002778 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002779 if (ret != NO_ERROR) {
2780 /* Ignore the error for Mono camera
2781 * because the PAAF bit mask is only set
2782 * for CAM_STREAM_TYPE_ANALYSIS stream type
2783 */
2784 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2785 LOGW("getAnalysisInfo failed, ret = %d", ret);
2786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002787 }
2788 mSupportChannel = new QCamera3SupportChannel(
2789 mCameraHandle->camera_handle,
2790 mChannelHandle,
2791 mCameraHandle->ops,
2792 &gCamCapability[mCameraId]->padding_info,
2793 callbackFeatureMask,
2794 CAM_STREAM_TYPE_CALLBACK,
2795 &QCamera3SupportChannel::kDim,
2796 CAM_FORMAT_YUV_420_NV21,
2797 supportInfo.hw_analysis_supported,
2798 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002799 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 if (!mSupportChannel) {
2801 LOGE("dummy channel cannot be created");
2802 pthread_mutex_unlock(&mMutex);
2803 return -ENOMEM;
2804 }
2805 }
2806
2807 if (mSupportChannel) {
2808 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2809 QCamera3SupportChannel::kDim;
2810 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2811 CAM_STREAM_TYPE_CALLBACK;
2812 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2813 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2814 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2815 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2816 gCamCapability[mCameraId]->color_arrangement);
2817 mStreamConfigInfo.num_streams++;
2818 }
2819
2820 if (mRawDumpChannel) {
2821 cam_dimension_t rawSize;
2822 rawSize = getMaxRawSize(mCameraId);
2823 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2824 rawSize;
2825 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2826 CAM_STREAM_TYPE_RAW;
2827 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2828 CAM_QCOM_FEATURE_NONE;
2829 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2830 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2831 gCamCapability[mCameraId]->color_arrangement);
2832 mStreamConfigInfo.num_streams++;
2833 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002834
2835 if (mHdrPlusRawSrcChannel) {
2836 cam_dimension_t rawSize;
2837 rawSize = getMaxRawSize(mCameraId);
2838 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2840 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2841 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2842 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2843 gCamCapability[mCameraId]->color_arrangement);
2844 mStreamConfigInfo.num_streams++;
2845 }
2846
Thierry Strudel3d639192016-09-09 11:52:26 -07002847 /* In HFR mode, if video stream is not added, create a dummy channel so that
2848     * ISP can operate in batch mode even for the preview-only case. This channel is
2849 * never 'start'ed (no stream-on), it is only 'initialized' */
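    /* The dummy channel only participates in stream configuration so the ISP can pick a
     * batch (HFR) sensor mode; since it is never streamed on, no buffers flow through it. */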
2850 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2851 !m_bIsVideo) {
2852 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2853 setPAAFSupport(dummyFeatureMask,
2854 CAM_STREAM_TYPE_VIDEO,
2855 gCamCapability[mCameraId]->color_arrangement);
2856 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2857 mChannelHandle,
2858 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002859 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 this,
2861 &mDummyBatchStream,
2862 CAM_STREAM_TYPE_VIDEO,
2863 dummyFeatureMask,
2864 mMetadataChannel);
2865 if (NULL == mDummyBatchChannel) {
2866 LOGE("creation of mDummyBatchChannel failed."
2867                    " Preview will use non-HFR sensor mode");
2868 }
2869 }
2870 if (mDummyBatchChannel) {
2871 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2872 mDummyBatchStream.width;
2873 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2874 mDummyBatchStream.height;
2875 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2876 CAM_STREAM_TYPE_VIDEO;
2877 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2878 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2879 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2880 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2881 gCamCapability[mCameraId]->color_arrangement);
2882 mStreamConfigInfo.num_streams++;
2883 }
2884
2885 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2886 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002887 m_bIs4KVideo ? 0 :
2888 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002889
2890 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2891 for (pendingRequestIterator i = mPendingRequestsList.begin();
2892 i != mPendingRequestsList.end();) {
2893 i = erasePendingRequest(i);
2894 }
2895 mPendingFrameDropList.clear();
2896 // Initialize/Reset the pending buffers list
2897 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2898 req.mPendingBufferList.clear();
2899 }
2900 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2901
Thierry Strudel3d639192016-09-09 11:52:26 -07002902 mCurJpegMeta.clear();
2903    //Get min frame duration for this stream configuration
2904 deriveMinFrameDuration();
2905
Chien-Yu Chenee335912017-02-09 17:53:20 -08002906 mFirstPreviewIntentSeen = false;
2907
2908    // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002909 {
2910 Mutex::Autolock l(gHdrPlusClientLock);
2911 disableHdrPlusModeLocked();
2912 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002913
Thierry Strudel3d639192016-09-09 11:52:26 -07002914 // Update state
2915 mState = CONFIGURED;
2916
2917 pthread_mutex_unlock(&mMutex);
2918
2919 return rc;
2920}
2921
2922/*===========================================================================
2923 * FUNCTION : validateCaptureRequest
2924 *
2925 * DESCRIPTION: validate a capture request from camera service
2926 *
2927 * PARAMETERS :
2928 * @request : request from framework to process
2929 *
2930 * RETURN :
2931 *
2932 *==========================================================================*/
2933int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002934 camera3_capture_request_t *request,
2935 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002936{
2937 ssize_t idx = 0;
2938 const camera3_stream_buffer_t *b;
2939 CameraMetadata meta;
2940
2941 /* Sanity check the request */
2942 if (request == NULL) {
2943 LOGE("NULL capture request");
2944 return BAD_VALUE;
2945 }
2946
2947 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2948 /*settings cannot be null for the first request*/
2949 return BAD_VALUE;
2950 }
2951
2952 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002953 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2954 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002955         LOGE("Request %d: No output buffers provided!",
2956                 frameNumber);
2957 return BAD_VALUE;
2958 }
2959 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2960         LOGE("Number of buffers %d equals or exceeds maximum number of streams %d!",
2961 request->num_output_buffers, MAX_NUM_STREAMS);
2962 return BAD_VALUE;
2963 }
2964 if (request->input_buffer != NULL) {
2965 b = request->input_buffer;
2966 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2967 LOGE("Request %d: Buffer %ld: Status not OK!",
2968 frameNumber, (long)idx);
2969 return BAD_VALUE;
2970 }
2971 if (b->release_fence != -1) {
2972 LOGE("Request %d: Buffer %ld: Has a release fence!",
2973 frameNumber, (long)idx);
2974 return BAD_VALUE;
2975 }
2976 if (b->buffer == NULL) {
2977 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2978 frameNumber, (long)idx);
2979 return BAD_VALUE;
2980 }
2981 }
2982
2983 // Validate all buffers
2984 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002985 if (b == NULL) {
2986 return BAD_VALUE;
2987 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002988 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002989 QCamera3ProcessingChannel *channel =
2990 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2991 if (channel == NULL) {
2992 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2993 frameNumber, (long)idx);
2994 return BAD_VALUE;
2995 }
2996 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2997 LOGE("Request %d: Buffer %ld: Status not OK!",
2998 frameNumber, (long)idx);
2999 return BAD_VALUE;
3000 }
3001 if (b->release_fence != -1) {
3002 LOGE("Request %d: Buffer %ld: Has a release fence!",
3003 frameNumber, (long)idx);
3004 return BAD_VALUE;
3005 }
3006 if (b->buffer == NULL) {
3007 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3008 frameNumber, (long)idx);
3009 return BAD_VALUE;
3010 }
3011 if (*(b->buffer) == NULL) {
3012 LOGE("Request %d: Buffer %ld: NULL private handle!",
3013 frameNumber, (long)idx);
3014 return BAD_VALUE;
3015 }
3016 idx++;
3017 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003018 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003019 return NO_ERROR;
3020}
3021
3022/*===========================================================================
3023 * FUNCTION : deriveMinFrameDuration
3024 *
3025 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3026 * on currently configured streams.
3027 *
3028 * PARAMETERS : NONE
3029 *
3030 * RETURN : NONE
3031 *
3032 *==========================================================================*/
3033void QCamera3HardwareInterface::deriveMinFrameDuration()
3034{
3035 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3036
3037 maxJpegDim = 0;
3038 maxProcessedDim = 0;
3039 maxRawDim = 0;
3040
3041 // Figure out maximum jpeg, processed, and raw dimensions
3042 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3043 it != mStreamInfo.end(); it++) {
3044
3045 // Input stream doesn't have valid stream_type
3046 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3047 continue;
3048
3049 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3050 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3051 if (dimension > maxJpegDim)
3052 maxJpegDim = dimension;
3053 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3054 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3055 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3056 if (dimension > maxRawDim)
3057 maxRawDim = dimension;
3058 } else {
3059 if (dimension > maxProcessedDim)
3060 maxProcessedDim = dimension;
3061 }
3062 }
3063
3064 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3065 MAX_SIZES_CNT);
3066
3067 //Assume all jpeg dimensions are in processed dimensions.
3068 if (maxJpegDim > maxProcessedDim)
3069 maxProcessedDim = maxJpegDim;
3070    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
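    // e.g., if the largest processed/JPEG stream is 12 MP and the sensor offers 10 MP and
    // 16 MP raw modes, the 16 MP entry is selected below (illustrative sizes only).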
3071 if (maxProcessedDim > maxRawDim) {
3072 maxRawDim = INT32_MAX;
3073
3074 for (size_t i = 0; i < count; i++) {
3075 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3076 gCamCapability[mCameraId]->raw_dim[i].height;
3077 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3078 maxRawDim = dimension;
3079 }
3080 }
3081
3082 //Find minimum durations for processed, jpeg, and raw
3083 for (size_t i = 0; i < count; i++) {
3084 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3085 gCamCapability[mCameraId]->raw_dim[i].height) {
3086 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3087 break;
3088 }
3089 }
3090 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3091 for (size_t i = 0; i < count; i++) {
3092 if (maxProcessedDim ==
3093 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3094 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3095 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3096 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3097 break;
3098 }
3099 }
3100}
3101
3102/*===========================================================================
3103 * FUNCTION : getMinFrameDuration
3104 *
3105 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3106 *   frame durations and the current request configuration.
3107 *
3108 * PARAMETERS : @request: request sent by the frameworks
3109 *
3110 * RETURN : min frame duration for a particular request
3111 *
3112 *==========================================================================*/
3113int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3114{
3115 bool hasJpegStream = false;
3116 bool hasRawStream = false;
3117 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3118 const camera3_stream_t *stream = request->output_buffers[i].stream;
3119 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3120 hasJpegStream = true;
3121 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3122 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3123 stream->format == HAL_PIXEL_FORMAT_RAW16)
3124 hasRawStream = true;
3125 }
3126
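    // A request with only processed/raw buffers is bound by the larger of the raw and
    // processed minimum durations; adding a BLOB (JPEG) buffer also folds in the JPEG
    // minimum duration.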
3127 if (!hasJpegStream)
3128 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3129 else
3130 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3131}
3132
3133/*===========================================================================
3134 * FUNCTION : handleBuffersDuringFlushLock
3135 *
3136 * DESCRIPTION: Account for buffers returned from back-end during flush
3137 * This function is executed while mMutex is held by the caller.
3138 *
3139 * PARAMETERS :
3140 * @buffer: image buffer for the callback
3141 *
3142 * RETURN :
3143 *==========================================================================*/
3144void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3145{
3146 bool buffer_found = false;
3147 for (List<PendingBuffersInRequest>::iterator req =
3148 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3149 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3150 for (List<PendingBufferInfo>::iterator i =
3151 req->mPendingBufferList.begin();
3152 i != req->mPendingBufferList.end(); i++) {
3153 if (i->buffer == buffer->buffer) {
3154 mPendingBuffersMap.numPendingBufsAtFlush--;
3155 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3156 buffer->buffer, req->frame_number,
3157 mPendingBuffersMap.numPendingBufsAtFlush);
3158 buffer_found = true;
3159 break;
3160 }
3161 }
3162 if (buffer_found) {
3163 break;
3164 }
3165 }
3166 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3167 //signal the flush()
3168 LOGD("All buffers returned to HAL. Continue flush");
3169 pthread_cond_signal(&mBuffersCond);
3170 }
3171}
3172
Thierry Strudel3d639192016-09-09 11:52:26 -07003173/*===========================================================================
3174 * FUNCTION : handleBatchMetadata
3175 *
3176 * DESCRIPTION: Handles metadata buffer callback in batch mode
3177 *
3178 * PARAMETERS : @metadata_buf: metadata buffer
3179 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3180 * the meta buf in this method
3181 *
3182 * RETURN :
3183 *
3184 *==========================================================================*/
3185void QCamera3HardwareInterface::handleBatchMetadata(
3186 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3187{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003188 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003189
3190 if (NULL == metadata_buf) {
3191 LOGE("metadata_buf is NULL");
3192 return;
3193 }
3194    /* In batch mode, the metadata will contain the frame number and timestamp of
3195 * the last frame in the batch. Eg: a batch containing buffers from request
3196 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3197 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3198 * multiple process_capture_results */
3199 metadata_buffer_t *metadata =
3200 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3201 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3202 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3203 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3204 uint32_t frame_number = 0, urgent_frame_number = 0;
3205 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3206 bool invalid_metadata = false;
3207 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3208 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003209 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003210
3211 int32_t *p_frame_number_valid =
3212 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3213 uint32_t *p_frame_number =
3214 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3215 int64_t *p_capture_time =
3216 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3217 int32_t *p_urgent_frame_number_valid =
3218 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3219 uint32_t *p_urgent_frame_number =
3220 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3221
3222 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3223 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3224 (NULL == p_urgent_frame_number)) {
3225 LOGE("Invalid metadata");
3226 invalid_metadata = true;
3227 } else {
3228 frame_number_valid = *p_frame_number_valid;
3229 last_frame_number = *p_frame_number;
3230 last_frame_capture_time = *p_capture_time;
3231 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3232 last_urgent_frame_number = *p_urgent_frame_number;
3233 }
3234
3235    /* In batch mode, when no video buffers are requested, set_parms are sent
3236 * for every capture_request. The difference between consecutive urgent
3237 * frame numbers and frame numbers should be used to interpolate the
3238 * corresponding frame numbers and time stamps */
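    /* e.g., if mPendingBatchMap maps a last frame number of 8 back to a first frame
     * number of 5, frames 5..8 are regenerated one by one from this single batch
     * metadata (illustrative numbers only). */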
3239 pthread_mutex_lock(&mMutex);
3240 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003241 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3242 if(idx < 0) {
3243 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3244 last_urgent_frame_number);
3245 mState = ERROR;
3246 pthread_mutex_unlock(&mMutex);
3247 return;
3248 }
3249 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003250 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3251 first_urgent_frame_number;
3252
3253 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3254 urgent_frame_number_valid,
3255 first_urgent_frame_number, last_urgent_frame_number);
3256 }
3257
3258 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003259 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3260 if(idx < 0) {
3261 LOGE("Invalid frame number received: %d. Irrecoverable error",
3262 last_frame_number);
3263 mState = ERROR;
3264 pthread_mutex_unlock(&mMutex);
3265 return;
3266 }
3267 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003268 frameNumDiff = last_frame_number + 1 -
3269 first_frame_number;
3270 mPendingBatchMap.removeItem(last_frame_number);
3271
3272 LOGD("frm: valid: %d frm_num: %d - %d",
3273 frame_number_valid,
3274 first_frame_number, last_frame_number);
3275
3276 }
3277 pthread_mutex_unlock(&mMutex);
3278
3279 if (urgent_frame_number_valid || frame_number_valid) {
3280 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3281 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3282 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3283 urgentFrameNumDiff, last_urgent_frame_number);
3284 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3285 LOGE("frameNumDiff: %d frameNum: %d",
3286 frameNumDiff, last_frame_number);
3287 }
3288
3289 for (size_t i = 0; i < loopCount; i++) {
3290 /* handleMetadataWithLock is called even for invalid_metadata for
3291 * pipeline depth calculation */
3292 if (!invalid_metadata) {
3293 /* Infer frame number. Batch metadata contains frame number of the
3294 * last frame */
3295 if (urgent_frame_number_valid) {
3296 if (i < urgentFrameNumDiff) {
3297 urgent_frame_number =
3298 first_urgent_frame_number + i;
3299 LOGD("inferred urgent frame_number: %d",
3300 urgent_frame_number);
3301 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3302 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3303 } else {
3304 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3305 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3306 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3307 }
3308 }
3309
3310 /* Infer frame number. Batch metadata contains frame number of the
3311 * last frame */
3312 if (frame_number_valid) {
3313 if (i < frameNumDiff) {
3314 frame_number = first_frame_number + i;
3315 LOGD("inferred frame_number: %d", frame_number);
3316 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3317 CAM_INTF_META_FRAME_NUMBER, frame_number);
3318 } else {
3319 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3320 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3321 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3322 }
3323 }
3324
3325 if (last_frame_capture_time) {
3326 //Infer timestamp
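                // e.g., for a 4-frame batch at 120 fps the inferred timestamps step back
                // from the last capture time in ~8.33 ms (NSEC_PER_SEC / mHFRVideoFps)
                // increments (illustrative figures).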
3327 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003328 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003329 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003330 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003331 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3332 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3333 LOGD("batch capture_time: %lld, capture_time: %lld",
3334 last_frame_capture_time, capture_time);
3335 }
3336 }
3337 pthread_mutex_lock(&mMutex);
3338 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003339 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003340 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3341 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003342                 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003343 pthread_mutex_unlock(&mMutex);
3344 }
3345
3346 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003347 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003348 mMetadataChannel->bufDone(metadata_buf);
3349 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003350 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003351 }
3352}
3353
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003354void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3355 camera3_error_msg_code_t errorCode)
3356{
3357 camera3_notify_msg_t notify_msg;
3358 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3359 notify_msg.type = CAMERA3_MSG_ERROR;
3360 notify_msg.message.error.error_code = errorCode;
3361 notify_msg.message.error.error_stream = NULL;
3362 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003363 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003364
3365 return;
3366}
Thierry Strudel3d639192016-09-09 11:52:26 -07003367/*===========================================================================
3368 * FUNCTION : handleMetadataWithLock
3369 *
3370 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3371 *
3372 * PARAMETERS : @metadata_buf: metadata buffer
3373 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3374 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003375 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3376 * last urgent metadata in a batch. Always true for non-batch mode
3377 * @lastMetadataInBatch: Boolean to indicate whether this is the
3378 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003379 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3380 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 *
3382 * RETURN :
3383 *
3384 *==========================================================================*/
3385void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003386 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003387 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3388 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003389{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003390 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003391 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3392 //during flush do not send metadata from this thread
3393 LOGD("not sending metadata during flush or when mState is error");
3394 if (free_and_bufdone_meta_buf) {
3395 mMetadataChannel->bufDone(metadata_buf);
3396 free(metadata_buf);
3397 }
3398 return;
3399 }
3400
3401 //not in flush
3402 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3403 int32_t frame_number_valid, urgent_frame_number_valid;
3404 uint32_t frame_number, urgent_frame_number;
3405 int64_t capture_time;
3406 nsecs_t currentSysTime;
3407
3408 int32_t *p_frame_number_valid =
3409 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3410 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3411 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3412 int32_t *p_urgent_frame_number_valid =
3413 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3414 uint32_t *p_urgent_frame_number =
3415 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3416 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3417 metadata) {
3418 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3419 *p_frame_number_valid, *p_frame_number);
3420 }
3421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003422 camera_metadata_t *resultMetadata = nullptr;
3423
Thierry Strudel3d639192016-09-09 11:52:26 -07003424 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3425 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3426 LOGE("Invalid metadata");
3427 if (free_and_bufdone_meta_buf) {
3428 mMetadataChannel->bufDone(metadata_buf);
3429 free(metadata_buf);
3430 }
3431 goto done_metadata;
3432 }
3433 frame_number_valid = *p_frame_number_valid;
3434 frame_number = *p_frame_number;
3435 capture_time = *p_capture_time;
3436 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3437 urgent_frame_number = *p_urgent_frame_number;
3438 currentSysTime = systemTime(CLOCK_MONOTONIC);
3439
3440 // Detect if buffers from any requests are overdue
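    // Overdue buffers are cancelled via timeoutFrame() below so the pipeline does not
    // stall indefinitely waiting on a frame that will never arrive.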
3441 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003442 int64_t timeout;
3443 {
3444 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3445 // If there is a pending HDR+ request, the following requests may be blocked until the
3446 // HDR+ request is done. So allow a longer timeout.
3447 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3448 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3449 }
3450
3451 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003452 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003453 assert(missed.stream->priv);
3454 if (missed.stream->priv) {
3455 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3456 assert(ch->mStreams[0]);
3457 if (ch->mStreams[0]) {
3458 LOGE("Cancel missing frame = %d, buffer = %p,"
3459 "stream type = %d, stream format = %d",
3460 req.frame_number, missed.buffer,
3461 ch->mStreams[0]->getMyType(), missed.stream->format);
3462 ch->timeoutFrame(req.frame_number);
3463 }
3464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003465 }
3466 }
3467 }
3468 //Partial result on process_capture_result for timestamp
3469 if (urgent_frame_number_valid) {
3470 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3471 urgent_frame_number, capture_time);
3472
3473        //Received an urgent Frame Number, handle it
3474 //using partial results
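        // A partial result carries the 3A (AE/AF/AWB) metadata ahead of the full capture
        // result for the same frame, so the framework sees 3A state as early as possible.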
3475 for (pendingRequestIterator i =
3476 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3477 LOGD("Iterator Frame = %d urgent frame = %d",
3478 i->frame_number, urgent_frame_number);
3479
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003480 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 (i->partial_result_cnt == 0)) {
3482 LOGE("Error: HAL missed urgent metadata for frame number %d",
3483 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003484 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 }
3486
3487 if (i->frame_number == urgent_frame_number &&
3488 i->bUrgentReceived == 0) {
3489
3490 camera3_capture_result_t result;
3491 memset(&result, 0, sizeof(camera3_capture_result_t));
3492
3493 i->partial_result_cnt++;
3494 i->bUrgentReceived = 1;
3495 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003496 result.result = translateCbUrgentMetadataToResultMetadata(
3497 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003498 // Populate metadata result
3499 result.frame_number = urgent_frame_number;
3500 result.num_output_buffers = 0;
3501 result.output_buffers = NULL;
3502 result.partial_result = i->partial_result_cnt;
3503
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003504 {
3505 Mutex::Autolock l(gHdrPlusClientLock);
3506 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3507 // Notify HDR+ client about the partial metadata.
3508 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3509 result.partial_result == PARTIAL_RESULT_COUNT);
3510 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003511 }
3512
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003513 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 LOGD("urgent frame_number = %u, capture_time = %lld",
3515 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003516 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3517 // Instant AEC settled for this frame.
3518 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3519 mInstantAECSettledFrameNumber = urgent_frame_number;
3520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003521 free_camera_metadata((camera_metadata_t *)result.result);
3522 break;
3523 }
3524 }
3525 }
3526
3527 if (!frame_number_valid) {
3528 LOGD("Not a valid normal frame number, used as SOF only");
3529 if (free_and_bufdone_meta_buf) {
3530 mMetadataChannel->bufDone(metadata_buf);
3531 free(metadata_buf);
3532 }
3533 goto done_metadata;
3534 }
3535 LOGH("valid frame_number = %u, capture_time = %lld",
3536 frame_number, capture_time);
3537
Emilian Peev7650c122017-01-19 08:24:33 -08003538 if (metadata->is_depth_data_valid) {
3539 handleDepthDataLocked(metadata->depth_data, frame_number);
3540 }
3541
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003542 // Check whether any stream buffer corresponding to this is dropped or not
3543 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3544    // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3545 for (auto & pendingRequest : mPendingRequestsList) {
3546 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3547 mInstantAECSettledFrameNumber)) {
3548 camera3_notify_msg_t notify_msg = {};
3549 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003550 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003551 QCamera3ProcessingChannel *channel =
3552 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003553 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 if (p_cam_frame_drop) {
3555 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003556 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 // Got the stream ID for drop frame.
3558 dropFrame = true;
3559 break;
3560 }
3561 }
3562 } else {
3563 // This is instant AEC case.
3564 // For instant AEC drop the stream untill AEC is settled.
3565                    // For instant AEC, drop the stream until AEC is settled.
3566 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003567
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003568 if (dropFrame) {
3569 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3570 if (p_cam_frame_drop) {
3571 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003572 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003573 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003574 } else {
3575 // For instant AEC, inform frame drop and frame number
3576 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3577 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003578 pendingRequest.frame_number, streamID,
3579 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003580 }
3581 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003583 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003584 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003585 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003586 if (p_cam_frame_drop) {
3587 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003588 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003589 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003590 } else {
3591 // For instant AEC, inform frame drop and frame number
3592 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3593 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003594 pendingRequest.frame_number, streamID,
3595 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003596 }
3597 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003598 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003599 PendingFrameDrop.stream_ID = streamID;
3600 // Add the Frame drop info to mPendingFrameDropList
3601 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003603 }
3604 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003605 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003606
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003607 for (auto & pendingRequest : mPendingRequestsList) {
3608 // Find the pending request with the frame number.
3609 if (pendingRequest.frame_number == frame_number) {
3610 // Update the sensor timestamp.
3611 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003612
Thierry Strudel3d639192016-09-09 11:52:26 -07003613
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003614 /* Set the timestamp in display metadata so that clients aware of
3615               private_handle such as VT can use these unmodified timestamps.
3616 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003618
Thierry Strudel3d639192016-09-09 11:52:26 -07003619 // Find channel requiring metadata, meaning internal offline postprocess
3620 // is needed.
3621 //TODO: for now, we don't support two streams requiring metadata at the same time.
3622            // (because we are not making copies, and the metadata buffer is not reference counted.)
3623 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003624 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3625 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 if (iter->need_metadata) {
3627 internalPproc = true;
3628 QCamera3ProcessingChannel *channel =
3629 (QCamera3ProcessingChannel *)iter->stream->priv;
3630 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003631 if(p_is_metabuf_queued != NULL) {
3632 *p_is_metabuf_queued = true;
3633 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003634 break;
3635 }
3636 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 for (auto itr = pendingRequest.internalRequestList.begin();
3638 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003639 if (itr->need_metadata) {
3640 internalPproc = true;
3641 QCamera3ProcessingChannel *channel =
3642 (QCamera3ProcessingChannel *)itr->stream->priv;
3643 channel->queueReprocMetadata(metadata_buf);
3644 break;
3645 }
3646 }
3647
Thierry Strudel54dc9782017-02-15 12:12:10 -08003648 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003649
3650 bool *enableZsl = nullptr;
3651 if (gExposeEnableZslKey) {
3652 enableZsl = &pendingRequest.enableZsl;
3653 }
3654
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655 resultMetadata = translateFromHalMetadata(metadata,
3656 pendingRequest.timestamp, pendingRequest.request_id,
3657 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3658 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003659 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003660 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003662 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003664 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003665
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003667
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 if (pendingRequest.blob_request) {
3669 //Dump tuning metadata if enabled and available
3670 char prop[PROPERTY_VALUE_MAX];
3671 memset(prop, 0, sizeof(prop));
3672 property_get("persist.camera.dumpmetadata", prop, "0");
3673 int32_t enabled = atoi(prop);
3674 if (enabled && metadata->is_tuning_params_valid) {
3675 dumpMetadataToFile(metadata->tuning_params,
3676 mMetaFrameCount,
3677 enabled,
3678 "Snapshot",
3679 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003680 }
3681 }
3682
3683 if (!internalPproc) {
3684 LOGD("couldn't find need_metadata for this metadata");
3685 // Return metadata buffer
3686 if (free_and_bufdone_meta_buf) {
3687 mMetadataChannel->bufDone(metadata_buf);
3688 free(metadata_buf);
3689 }
3690 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003691
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003692 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 }
3694 }
3695
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003696 // Try to send out shutter callbacks and capture results.
3697 handlePendingResultsWithLock(frame_number, resultMetadata);
3698 return;
3699
Thierry Strudel3d639192016-09-09 11:52:26 -07003700done_metadata:
3701 for (pendingRequestIterator i = mPendingRequestsList.begin();
3702 i != mPendingRequestsList.end() ;i++) {
3703 i->pipeline_depth++;
3704 }
3705 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3706 unblockRequestIfNecessary();
3707}
3708
3709/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003710 * FUNCTION : handleDepthDataLocked
3711 *
3712 * DESCRIPTION: Handles incoming depth data
3713 *
3714 * PARAMETERS : @depthData : Depth data
3715 * @frameNumber: Frame number of the incoming depth data
3716 *
3717 * RETURN :
3718 *
3719 *==========================================================================*/
3720void QCamera3HardwareInterface::handleDepthDataLocked(
3721 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3722 uint32_t currentFrameNumber;
3723 buffer_handle_t *depthBuffer;
3724
3725 if (nullptr == mDepthChannel) {
3726 LOGE("Depth channel not present!");
3727 return;
3728 }
3729
3730 camera3_stream_buffer_t resultBuffer =
3731 {.acquire_fence = -1,
3732 .release_fence = -1,
3733 .status = CAMERA3_BUFFER_STATUS_OK,
3734 .buffer = nullptr,
3735 .stream = mDepthChannel->getStream()};
3736 camera3_capture_result_t result =
3737 {.result = nullptr,
3738 .num_output_buffers = 1,
3739 .output_buffers = &resultBuffer,
3740 .partial_result = 0,
3741 .frame_number = 0};
3742
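    // Drain depth buffers in frame-number order: entries older than the incoming frame are
    // returned with an error status, the matching frame is populated with the depth payload,
    // and anything newer is left pending.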
3743 do {
3744 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3745 if (nullptr == depthBuffer) {
3746 break;
3747 }
3748
3749 result.frame_number = currentFrameNumber;
3750 resultBuffer.buffer = depthBuffer;
3751 if (currentFrameNumber == frameNumber) {
3752 int32_t rc = mDepthChannel->populateDepthData(depthData,
3753 frameNumber);
3754 if (NO_ERROR != rc) {
3755 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3756 } else {
3757 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3758 }
3759 } else if (currentFrameNumber > frameNumber) {
3760 break;
3761 } else {
3762 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3763 {{currentFrameNumber, mDepthChannel->getStream(),
3764 CAMERA3_MSG_ERROR_BUFFER}}};
3765 orchestrateNotify(&notify_msg);
3766
3767            LOGE("Depth buffer for frame number: %d is missing, "
3768 "returning back!", currentFrameNumber);
3769 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3770 }
3771 mDepthChannel->unmapBuffer(currentFrameNumber);
3772
3773 orchestrateResult(&result);
3774 } while (currentFrameNumber < frameNumber);
3775}
3776
3777/*===========================================================================
3778 * FUNCTION : notifyErrorFoPendingDepthData
3779 *
3780 * DESCRIPTION: Returns error for any pending depth buffers
3781 *
3782 * PARAMETERS : depthCh - depth channel that needs to get flushed
3783 *
3784 * RETURN :
3785 *
3786 *==========================================================================*/
3787void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3788 QCamera3DepthChannel *depthCh) {
3789 uint32_t currentFrameNumber;
3790 buffer_handle_t *depthBuffer;
3791
3792 if (nullptr == depthCh) {
3793 return;
3794 }
3795
3796 camera3_notify_msg_t notify_msg =
3797 {.type = CAMERA3_MSG_ERROR,
3798 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3799 camera3_stream_buffer_t resultBuffer =
3800 {.acquire_fence = -1,
3801 .release_fence = -1,
3802 .buffer = nullptr,
3803 .stream = depthCh->getStream(),
3804 .status = CAMERA3_BUFFER_STATUS_ERROR};
3805 camera3_capture_result_t result =
3806 {.result = nullptr,
3807 .frame_number = 0,
3808 .num_output_buffers = 1,
3809 .partial_result = 0,
3810 .output_buffers = &resultBuffer};
3811
3812 while (nullptr !=
3813 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3814 depthCh->unmapBuffer(currentFrameNumber);
3815
3816 notify_msg.message.error.frame_number = currentFrameNumber;
3817 orchestrateNotify(&notify_msg);
3818
3819 resultBuffer.buffer = depthBuffer;
3820 result.frame_number = currentFrameNumber;
3821 orchestrateResult(&result);
3822 };
3823}
3824
3825/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003826 * FUNCTION : hdrPlusPerfLock
3827 *
3828 * DESCRIPTION: perf lock for HDR+ using custom intent
3829 *
3830 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3831 *
3832 * RETURN : None
3833 *
3834 *==========================================================================*/
3835void QCamera3HardwareInterface::hdrPlusPerfLock(
3836 mm_camera_super_buf_t *metadata_buf)
3837{
3838 if (NULL == metadata_buf) {
3839 LOGE("metadata_buf is NULL");
3840 return;
3841 }
3842 metadata_buffer_t *metadata =
3843 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3844 int32_t *p_frame_number_valid =
3845 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3846 uint32_t *p_frame_number =
3847 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3848
3849 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3850 LOGE("%s: Invalid metadata", __func__);
3851 return;
3852 }
3853
3854 //acquire perf lock for 5 sec after the last HDR frame is captured
3855 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3856 if ((p_frame_number != NULL) &&
3857 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003858 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003859 }
3860 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003861}
3862
3863/*===========================================================================
3864 * FUNCTION : handleInputBufferWithLock
3865 *
3866 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3867 *
3868 * PARAMETERS : @frame_number: frame number of the input buffer
3869 *
3870 * RETURN :
3871 *
3872 *==========================================================================*/
3873void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3874{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003875 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003876 pendingRequestIterator i = mPendingRequestsList.begin();
3877 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3878 i++;
3879 }
3880 if (i != mPendingRequestsList.end() && i->input_buffer) {
3881 //found the right request
3882 if (!i->shutter_notified) {
3883 CameraMetadata settings;
3884 camera3_notify_msg_t notify_msg;
3885 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3886 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3887 if(i->settings) {
3888 settings = i->settings;
3889 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3890 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3891 } else {
3892 LOGE("No timestamp in input settings! Using current one.");
3893 }
3894 } else {
3895 LOGE("Input settings missing!");
3896 }
3897
3898 notify_msg.type = CAMERA3_MSG_SHUTTER;
3899 notify_msg.message.shutter.frame_number = frame_number;
3900 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003901 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 i->shutter_notified = true;
3903 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3904 i->frame_number, notify_msg.message.shutter.timestamp);
3905 }
3906
3907 if (i->input_buffer->release_fence != -1) {
3908 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3909 close(i->input_buffer->release_fence);
3910 if (rc != OK) {
3911 LOGE("input buffer sync wait failed %d", rc);
3912 }
3913 }
3914
3915 camera3_capture_result result;
3916 memset(&result, 0, sizeof(camera3_capture_result));
3917 result.frame_number = frame_number;
3918 result.result = i->settings;
3919 result.input_buffer = i->input_buffer;
3920 result.partial_result = PARTIAL_RESULT_COUNT;
3921
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003922 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003923 LOGD("Input request metadata and input buffer frame_number = %u",
3924 i->frame_number);
3925 i = erasePendingRequest(i);
3926 } else {
3927 LOGE("Could not find input request for frame number %d", frame_number);
3928 }
3929}
3930
3931/*===========================================================================
3932 * FUNCTION : handleBufferWithLock
3933 *
3934 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3935 *
3936 * PARAMETERS : @buffer: image buffer for the callback
3937 * @frame_number: frame number of the image buffer
3938 *
3939 * RETURN :
3940 *
3941 *==========================================================================*/
3942void QCamera3HardwareInterface::handleBufferWithLock(
3943 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3944{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003945 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003946
3947 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3948 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3949 }
3950
Thierry Strudel3d639192016-09-09 11:52:26 -07003951 /* Nothing to be done during error state */
3952 if ((ERROR == mState) || (DEINIT == mState)) {
3953 return;
3954 }
3955 if (mFlushPerf) {
3956 handleBuffersDuringFlushLock(buffer);
3957 return;
3958 }
3959 //not in flush
3960 // If the frame number doesn't exist in the pending request list,
3961 // directly send the buffer to the frameworks, and update pending buffers map
3962 // Otherwise, book-keep the buffer.
3963 pendingRequestIterator i = mPendingRequestsList.begin();
3964 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3965 i++;
3966 }
3967 if (i == mPendingRequestsList.end()) {
3968 // Verify that all pending live requests have frame_numbers greater than this one
3969 for (pendingRequestIterator j = mPendingRequestsList.begin();
3970 j != mPendingRequestsList.end(); j++) {
3971 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3972 LOGW("Error: pending live frame number %d is smaller than %d",
3973 j->frame_number, frame_number);
3974 }
3975 }
3976 camera3_capture_result_t result;
3977 memset(&result, 0, sizeof(camera3_capture_result_t));
3978 result.result = NULL;
3979 result.frame_number = frame_number;
3980 result.num_output_buffers = 1;
3981 result.partial_result = 0;
3982 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3983 m != mPendingFrameDropList.end(); m++) {
3984 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3985 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3986 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3987 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3988 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3989 frame_number, streamID);
3990 m = mPendingFrameDropList.erase(m);
3991 break;
3992 }
3993 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003994 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003995 result.output_buffers = buffer;
3996 LOGH("result frame_number = %d, buffer = %p",
3997 frame_number, buffer->buffer);
3998
3999 mPendingBuffersMap.removeBuf(buffer->buffer);
4000
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004001 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004002 } else {
4003 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004004 if (i->input_buffer->release_fence != -1) {
4005 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4006 close(i->input_buffer->release_fence);
4007 if (rc != OK) {
4008 LOGE("input buffer sync wait failed %d", rc);
4009 }
4010 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004012
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004013 // Put buffer into the pending request
4014 for (auto &requestedBuffer : i->buffers) {
4015 if (requestedBuffer.stream == buffer->stream) {
4016 if (requestedBuffer.buffer != nullptr) {
4017 LOGE("Error: buffer is already set");
4018 } else {
4019 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4020 sizeof(camera3_stream_buffer_t));
4021 *(requestedBuffer.buffer) = *buffer;
4022 LOGH("cache buffer %p at result frame_number %u",
4023 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004024 }
4025 }
4026 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004027
4028 if (i->input_buffer) {
4029 // For a reprocessing request, try to send out shutter callback and result metadata.
4030 handlePendingResultsWithLock(frame_number, nullptr);
4031 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004032 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004033
4034 if (mPreviewStarted == false) {
4035 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4036 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004037 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4038
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004039 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4040 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4041 mPreviewStarted = true;
4042
4043 // Set power hint for preview
4044 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4045 }
4046 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004047}
4048
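/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Handles pending results for a frame with mMutex held. Stores
 *              the result metadata in the matching pending request, then walks
 *              the pending list in frame-number order, sending shutter
 *              callbacks and capture results for every request that is ready
 *              and notifying ERROR_RESULT for older live requests that still
 *              have no result metadata.
 *
 * PARAMETERS : @frameNumber: frame number the result metadata belongs to
 *              @resultMetadata: result metadata for the frame; may be nullptr
 *              for a reprocessing request, whose result is taken from its
 *              settings
 *
 * RETURN     :
 *
 *==========================================================================*/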
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004049void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4050 const camera_metadata_t *resultMetadata)
4051{
4052 // Find the pending request for this result metadata.
4053 auto requestIter = mPendingRequestsList.begin();
4054 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4055 requestIter++;
4056 }
4057
4058 if (requestIter == mPendingRequestsList.end()) {
4059 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4060 return;
4061 }
4062
4063 // Update the result metadata
4064 requestIter->resultMetadata = resultMetadata;
4065
4066 // Check what type of request this is.
4067 bool liveRequest = false;
4068 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004069 // HDR+ request doesn't have partial results.
4070 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004071 } else if (requestIter->input_buffer != nullptr) {
4072 // Reprocessing request result is the same as settings.
4073 requestIter->resultMetadata = requestIter->settings;
4074 // Reprocessing request doesn't have partial results.
4075 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4076 } else {
4077 liveRequest = true;
4078 requestIter->partial_result_cnt++;
4079 mPendingLiveRequest--;
4080
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004081 {
4082 Mutex::Autolock l(gHdrPlusClientLock);
4083 // For a live request, send the metadata to HDR+ client.
4084 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4085 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4086 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4087 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004088 }
4089 }
4090
4091 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4092 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4093 bool readyToSend = true;
4094
4095 // Iterate through the pending requests to send out shutter callbacks and results that are
4096 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4097 // live requests that don't have result metadata yet.
4098 auto iter = mPendingRequestsList.begin();
4099 while (iter != mPendingRequestsList.end()) {
4100 // Check if current pending request is ready. If it's not ready, the following pending
4101 // requests are also not ready.
4102 if (readyToSend && iter->resultMetadata == nullptr) {
4103 readyToSend = false;
4104 }
4105
4106 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4107
4108 std::vector<camera3_stream_buffer_t> outputBuffers;
4109
4110 camera3_capture_result_t result = {};
4111 result.frame_number = iter->frame_number;
4112 result.result = iter->resultMetadata;
4113 result.partial_result = iter->partial_result_cnt;
4114
4115 // If this pending request has result metadata, we may be able to send out its shutter callback
4116 // and result metadata.
4117 if (iter->resultMetadata != nullptr) {
4118 if (!readyToSend) {
4119 // If any of the previous pending request is not ready, this pending request is
4120 // also not ready to send in order to keep shutter callbacks and result metadata
4121 // in order.
4122 iter++;
4123 continue;
4124 }
4125
4126 // Invoke the shutter callback if it has not been sent yet.
4127 if (!iter->shutter_notified) {
4128 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4129
4130 // Find the timestamp in HDR+ result metadata
4131 camera_metadata_ro_entry_t entry;
4132 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4133 ANDROID_SENSOR_TIMESTAMP, &entry);
4134 if (res != OK) {
4135 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4136 __FUNCTION__, iter->frame_number, strerror(-res), res);
4137 } else {
4138 timestamp = entry.data.i64[0];
4139 }
4140
4141 camera3_notify_msg_t notify_msg = {};
4142 notify_msg.type = CAMERA3_MSG_SHUTTER;
4143 notify_msg.message.shutter.frame_number = iter->frame_number;
4144 notify_msg.message.shutter.timestamp = timestamp;
4145 orchestrateNotify(&notify_msg);
4146 iter->shutter_notified = true;
4147 }
4148
4149 result.input_buffer = iter->input_buffer;
4150
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004151 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4152 // If the result metadata belongs to a live request, notify errors for previous pending
4153 // live requests.
4154 mPendingLiveRequest--;
4155
4156 CameraMetadata dummyMetadata;
4157 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4158 result.result = dummyMetadata.release();
4159
4160 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004161
4162 // partial_result should be PARTIAL_RESULT_COUNT in case of
4163 // ERROR_RESULT.
4164 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4165 result.partial_result = PARTIAL_RESULT_COUNT;
4166
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004167 } else {
4168 iter++;
4169 continue;
4170 }
4171
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004172 // Prepare output buffer array
4173 for (auto bufferInfoIter = iter->buffers.begin();
4174 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4175 if (bufferInfoIter->buffer != nullptr) {
4176
4177 QCamera3Channel *channel =
4178 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4179 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4180
4181 // Check if this buffer is a dropped frame.
4182 auto frameDropIter = mPendingFrameDropList.begin();
4183 while (frameDropIter != mPendingFrameDropList.end()) {
4184 if((frameDropIter->stream_ID == streamID) &&
4185 (frameDropIter->frame_number == frameNumber)) {
4186 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4187 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4188 streamID);
4189 mPendingFrameDropList.erase(frameDropIter);
4190 break;
4191 } else {
4192 frameDropIter++;
4193 }
4194 }
4195
4196 // Check buffer error status
4197 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4198 bufferInfoIter->buffer->buffer);
4199 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4200
4201 outputBuffers.push_back(*(bufferInfoIter->buffer));
4202 free(bufferInfoIter->buffer);
4203 bufferInfoIter->buffer = NULL;
4204 }
4205 }
4206
4207 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4208 result.num_output_buffers = outputBuffers.size();
4209
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 orchestrateResult(&result);
4211
4212 // For reprocessing, result metadata is the same as settings so do not free it here to
4213 // avoid double free.
4214 if (result.result != iter->settings) {
4215 free_camera_metadata((camera_metadata_t *)result.result);
4216 }
4217 iter->resultMetadata = nullptr;
4218 iter = erasePendingRequest(iter);
4219 }
4220
4221 if (liveRequest) {
4222 for (auto &iter : mPendingRequestsList) {
4223 // Increment pipeline depth for the following pending requests.
4224 if (iter.frame_number > frameNumber) {
4225 iter.pipeline_depth++;
4226 }
4227 }
4228 }
4229
4230 unblockRequestIfNecessary();
4231}
4232
Thierry Strudel3d639192016-09-09 11:52:26 -07004233/*===========================================================================
4234 * FUNCTION : unblockRequestIfNecessary
4235 *
4236 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4237 * that mMutex is held when this function is called.
4238 *
4239 * PARAMETERS :
4240 *
4241 * RETURN :
4242 *
4243 *==========================================================================*/
4244void QCamera3HardwareInterface::unblockRequestIfNecessary()
4245{
4246 // Unblock process_capture_request
4247 pthread_cond_signal(&mRequestCond);
4248}
4249
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004250/*===========================================================================
4251 * FUNCTION : isHdrSnapshotRequest
4252 *
4253 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4254 *
4255 * PARAMETERS : camera3 request structure
4256 *
4257 * RETURN : boolean decision variable
4258 *
4259 *==========================================================================*/
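// A request qualifies as an HDR snapshot when it carries at least one BLOB
// (JPEG) output buffer and either HDR snapshots are forced via
// mForceHdrSnapshot or the settings select ANDROID_CONTROL_SCENE_MODE_HDR
// under ANDROID_CONTROL_MODE_USE_SCENE_MODE.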
4260bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4261{
4262 if (request == NULL) {
4263 LOGE("Invalid request handle");
4264 assert(0);
4265 return false;
4266 }
4267
4268 if (!mForceHdrSnapshot) {
4269 CameraMetadata frame_settings;
4270 frame_settings = request->settings;
4271
4272 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4273 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4274 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4275 return false;
4276 }
4277 } else {
4278 return false;
4279 }
4280
4281 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4282 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4283 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4284 return false;
4285 }
4286 } else {
4287 return false;
4288 }
4289 }
4290
4291 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4292 if (request->output_buffers[i].stream->format
4293 == HAL_PIXEL_FORMAT_BLOB) {
4294 return true;
4295 }
4296 }
4297
4298 return false;
4299}
4300/*===========================================================================
4301 * FUNCTION : orchestrateRequest
4302 *
4303 * DESCRIPTION: Orchestrates a capture request from camera service
4304 *
4305 * PARAMETERS :
4306 * @request : request from framework to process
4307 *
4308 * RETURN : Error status codes
4309 *
4310 *==========================================================================*/
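// HDR snapshot orchestration, as implemented below: with AE locked, the
// framework request is expanded into a series of captures at different
// exposure compensations (GB_HDR_HALF_STEP_EV, 0, then GB_HDR_2X_STEP_EV).
// Each step issues metering-only frames on the internally requested BLOB
// stream before the actual capture; only the capture registered against the
// original framework frame number reaches the framework, while results for
// the purely internal frame numbers are dropped in orchestrateResult().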
4311int32_t QCamera3HardwareInterface::orchestrateRequest(
4312 camera3_capture_request_t *request)
4313{
4314
4315 uint32_t originalFrameNumber = request->frame_number;
4316 uint32_t originalOutputCount = request->num_output_buffers;
4317 const camera_metadata_t *original_settings = request->settings;
4318 List<InternalRequest> internallyRequestedStreams;
4319 List<InternalRequest> emptyInternalList;
4320
4321 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4322 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4323 uint32_t internalFrameNumber;
4324 CameraMetadata modified_meta;
4325
4326
4327 /* Add Blob channel to list of internally requested streams */
4328 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4329 if (request->output_buffers[i].stream->format
4330 == HAL_PIXEL_FORMAT_BLOB) {
4331 InternalRequest streamRequested;
4332 streamRequested.meteringOnly = 1;
4333 streamRequested.need_metadata = 0;
4334 streamRequested.stream = request->output_buffers[i].stream;
4335 internallyRequestedStreams.push_back(streamRequested);
4336 }
4337 }
4338 request->num_output_buffers = 0;
4339 auto itr = internallyRequestedStreams.begin();
4340
4341 /* Modify setting to set compensation */
4342 modified_meta = request->settings;
4343 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4344 uint8_t aeLock = 1;
4345 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4346 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4347 camera_metadata_t *modified_settings = modified_meta.release();
4348 request->settings = modified_settings;
4349
4350 /* Capture Settling & -2x frame */
4351 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4352 request->frame_number = internalFrameNumber;
4353 processCaptureRequest(request, internallyRequestedStreams);
4354
4355 request->num_output_buffers = originalOutputCount;
4356 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4357 request->frame_number = internalFrameNumber;
4358 processCaptureRequest(request, emptyInternalList);
4359 request->num_output_buffers = 0;
4360
4361 modified_meta = modified_settings;
4362 expCompensation = 0;
4363 aeLock = 1;
4364 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4365 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4366 modified_settings = modified_meta.release();
4367 request->settings = modified_settings;
4368
4369 /* Capture Settling & 0X frame */
4370
4371 itr = internallyRequestedStreams.begin();
4372 if (itr == internallyRequestedStreams.end()) {
4373 LOGE("Error Internally Requested Stream list is empty");
4374 assert(0);
4375 } else {
4376 itr->need_metadata = 0;
4377 itr->meteringOnly = 1;
4378 }
4379
4380 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4381 request->frame_number = internalFrameNumber;
4382 processCaptureRequest(request, internallyRequestedStreams);
4383
4384 itr = internallyRequestedStreams.begin();
4385 if (itr == internallyRequestedStreams.end()) {
4386 ALOGE("Error Internally Requested Stream list is empty");
4387 assert(0);
4388 } else {
4389 itr->need_metadata = 1;
4390 itr->meteringOnly = 0;
4391 }
4392
4393 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4394 request->frame_number = internalFrameNumber;
4395 processCaptureRequest(request, internallyRequestedStreams);
4396
4397 /* Capture 2X frame*/
4398 modified_meta = modified_settings;
4399 expCompensation = GB_HDR_2X_STEP_EV;
4400 aeLock = 1;
4401 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4402 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4403 modified_settings = modified_meta.release();
4404 request->settings = modified_settings;
4405
4406 itr = internallyRequestedStreams.begin();
4407 if (itr == internallyRequestedStreams.end()) {
4408 ALOGE("Error Internally Requested Stream list is empty");
4409 assert(0);
4410 } else {
4411 itr->need_metadata = 0;
4412 itr->meteringOnly = 1;
4413 }
4414 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4415 request->frame_number = internalFrameNumber;
4416 processCaptureRequest(request, internallyRequestedStreams);
4417
4418 itr = internallyRequestedStreams.begin();
4419 if (itr == internallyRequestedStreams.end()) {
4420 ALOGE("Error Internally Requested Stream list is empty");
4421 assert(0);
4422 } else {
4423 itr->need_metadata = 1;
4424 itr->meteringOnly = 0;
4425 }
4426
4427 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4428 request->frame_number = internalFrameNumber;
4429 processCaptureRequest(request, internallyRequestedStreams);
4430
4431
4432 /* Capture 2X on original streaming config*/
4433 internallyRequestedStreams.clear();
4434
4435 /* Restore original settings pointer */
4436 request->settings = original_settings;
4437 } else {
4438 uint32_t internalFrameNumber;
4439 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4440 request->frame_number = internalFrameNumber;
4441 return processCaptureRequest(request, internallyRequestedStreams);
4442 }
4443
4444 return NO_ERROR;
4445}
4446
4447/*===========================================================================
4448 * FUNCTION : orchestrateResult
4449 *
4450 * DESCRIPTION: Orchestrates a capture result to camera service
4451 *
4452 * PARAMETERS :
4453 * @result : capture result to be sent to the framework
4454 *
4455 * RETURN :
4456 *
4457 *==========================================================================*/
4458void QCamera3HardwareInterface::orchestrateResult(
4459 camera3_capture_result_t *result)
4460{
4461 uint32_t frameworkFrameNumber;
4462 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4463 frameworkFrameNumber);
4464 if (rc != NO_ERROR) {
4465 LOGE("Cannot find translated frameworkFrameNumber");
4466 assert(0);
4467 } else {
4468 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004469 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004470 } else {
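            // Internally orchestrated captures use their own frame-number space, so if
            // the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite it to the
            // framework-visible frame number before returning the result.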
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004471 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004472 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4473 camera_metadata_entry_t entry;
4474 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4475 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004476 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004477 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4478 if (ret != OK)
4479 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004480 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004481 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004482 result->frame_number = frameworkFrameNumber;
4483 mCallbackOps->process_capture_result(mCallbackOps, result);
4484 }
4485 }
4486}
4487
4488/*===========================================================================
4489 * FUNCTION : orchestrateNotify
4490 *
4491 * DESCRIPTION: Orchestrates a notify to camera service
4492 *
4493 * PARAMETERS :
4494 * @notify_msg : notify message to be sent to the framework
4495 *
4496 * RETURN :
4497 *
4498 *==========================================================================*/
4499void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4500{
4501 uint32_t frameworkFrameNumber;
4502 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004503 int32_t rc = NO_ERROR;
4504
4505 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004506 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004507
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004508 if (rc != NO_ERROR) {
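        // CAMERA3_MSG_ERROR_DEVICE is not tied to a specific capture, so it is
        // forwarded with frame number 0 even though no translation exists.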
Thierry Strudel2896d122017-02-23 19:18:03 -08004509 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4510 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4511 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004512 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004513 LOGE("Cannot find translated frameworkFrameNumber");
4514 assert(0);
4515 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004516 }
4517 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004518
4519 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4520 LOGD("Internal Request drop the notifyCb");
4521 } else {
4522 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4523 mCallbackOps->notify(mCallbackOps, notify_msg);
4524 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004525}
4526
4527/*===========================================================================
4528 * FUNCTION : FrameNumberRegistry
4529 *
4530 * DESCRIPTION: Constructor
4531 *
4532 * PARAMETERS :
4533 *
4534 * RETURN :
4535 *
4536 *==========================================================================*/
4537FrameNumberRegistry::FrameNumberRegistry()
4538{
4539 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4540}
4541
4542/*===========================================================================
4543 * FUNCTION : ~FrameNumberRegistry
4544 *
4545 * DESCRIPTION: Destructor
4546 *
4547 * PARAMETERS :
4548 *
4549 * RETURN :
4550 *
4551 *==========================================================================*/
4552FrameNumberRegistry::~FrameNumberRegistry()
4553{
4554}
4555
4556/*===========================================================================
4557 * FUNCTION : PurgeOldEntriesLocked
4558 *
4559 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4560 *
4561 * PARAMETERS :
4562 *
4563 * RETURN : NONE
4564 *
4565 *==========================================================================*/
4566void FrameNumberRegistry::purgeOldEntriesLocked()
4567{
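    // Keep only the most recent FRAME_REGISTER_LRU_SIZE mappings: erase
    // entries whose internal frame number has fallen out of the window.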
4568 while (_register.begin() != _register.end()) {
4569 auto itr = _register.begin();
4570 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4571 _register.erase(itr);
4572 } else {
4573 return;
4574 }
4575 }
4576}
4577
4578/*===========================================================================
4579 * FUNCTION : allocStoreInternalFrameNumber
4580 *
4581 * DESCRIPTION: Method to record a framework request and associate a new
4582 * internal frame number with it
4583 *
4584 * PARAMETERS :
4585 * @fFrameNumber: Identifier given by framework
4586 * @internalFN : Output parameter which will have the newly generated internal
4587 * entry
4588 *
4589 * RETURN : Error code
4590 *
4591 *==========================================================================*/
4592int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4593 uint32_t &internalFrameNumber)
4594{
4595 Mutex::Autolock lock(mRegistryLock);
4596 internalFrameNumber = _nextFreeInternalNumber++;
4597 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4598 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4599 purgeOldEntriesLocked();
4600 return NO_ERROR;
4601}
4602
4603/*===========================================================================
4604 * FUNCTION : generateStoreInternalFrameNumber
4605 *
4606 * DESCRIPTION: Method to associate a new internal request number independent
4607 * of any association with framework requests
4608 *
4609 * PARAMETERS :
4610 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4611 *
4612 *
4613 * RETURN : Error code
4614 *
4615 *==========================================================================*/
4616int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4617{
4618 Mutex::Autolock lock(mRegistryLock);
4619 internalFrameNumber = _nextFreeInternalNumber++;
4620 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4621 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4622 purgeOldEntriesLocked();
4623 return NO_ERROR;
4624}
4625
4626/*===========================================================================
4627 * FUNCTION : getFrameworkFrameNumber
4628 *
4629 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4630 *
4631 * PARAMETERS :
4632 * @internalFrame#: Internal reference
4633 * @frameworkframenumber: Output parameter holding framework frame entry
4634 *
4635 * RETURN : Error code
4636 *
4637 *==========================================================================*/
4638int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4639 uint32_t &frameworkFrameNumber)
4640{
4641 Mutex::Autolock lock(mRegistryLock);
4642 auto itr = _register.find(internalFrameNumber);
4643 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004644 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004645 return -ENOENT;
4646 }
4647
4648 frameworkFrameNumber = itr->second;
4649 purgeOldEntriesLocked();
4650 return NO_ERROR;
4651}
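
// Illustrative FrameNumberRegistry usage, a sketch mirroring how
// orchestrateRequest/orchestrateResult use it above (not additional behavior):
//
//   uint32_t internalFN, frameworkFN;
//   // Framework-visible request: remember the mapping.
//   _orchestrationDb.allocStoreInternalFrameNumber(frameworkFrameNumber, internalFN);
//   // Purely internal request: maps to EMPTY_FRAMEWORK_FRAME_NUMBER so its
//   // results and notifies are dropped.
//   _orchestrationDb.generateStoreInternalFrameNumber(internalFN);
//   // Translate back when a result or notify arrives:
//   if (_orchestrationDb.getFrameworkFrameNumber(internalFN, frameworkFN) == NO_ERROR) {
//       // frameworkFN is either the original framework frame number or
//       // EMPTY_FRAMEWORK_FRAME_NUMBER.
//   }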
Thierry Strudel3d639192016-09-09 11:52:26 -07004652
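/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a stream of
 *              the given channel: stream id, format, dimensions, per-plane
 *              stride/scanline, and the padding left over in the frame length.
 *
 * PARAMETERS : @config: stream configuration to fill
 *              @pbStreamId: stream id to assign in the configuration
 *              @pbStreamFormat: pixel format to assign in the configuration
 *              @channel: channel owning the stream
 *              @streamIndex: index of the stream within the channel
 *
 * RETURN     : OK on success
 *              BAD_VALUE if config or channel is null
 *              NAME_NOT_FOUND if the stream or its info cannot be found
 *
 *==========================================================================*/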
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004653status_t QCamera3HardwareInterface::fillPbStreamConfig(
4654 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4655 QCamera3Channel *channel, uint32_t streamIndex) {
4656 if (config == nullptr) {
4657 LOGE("%s: config is null", __FUNCTION__);
4658 return BAD_VALUE;
4659 }
4660
4661 if (channel == nullptr) {
4662 LOGE("%s: channel is null", __FUNCTION__);
4663 return BAD_VALUE;
4664 }
4665
4666 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4667 if (stream == nullptr) {
4668 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4669 return NAME_NOT_FOUND;
4670 }
4671
4672 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4673 if (streamInfo == nullptr) {
4674 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4675 return NAME_NOT_FOUND;
4676 }
4677
4678 config->id = pbStreamId;
4679 config->image.width = streamInfo->dim.width;
4680 config->image.height = streamInfo->dim.height;
4681 config->image.padding = 0;
4682 config->image.format = pbStreamFormat;
4683
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004684 uint32_t totalPlaneSize = 0;
4685
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004686 // Fill plane information.
4687 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4688 pbcamera::PlaneConfiguration plane;
4689 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4690 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4691 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004692
4693 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004694 }
4695
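    // Padding is whatever remains of the frame length after all plane data,
    // i.e. frame_len minus the accumulated stride * scanline of each plane.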
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004696 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004697 return OK;
4698}
4699
Thierry Strudel3d639192016-09-09 11:52:26 -07004700/*===========================================================================
4701 * FUNCTION : processCaptureRequest
4702 *
4703 * DESCRIPTION: process a capture request from camera service
4704 *
4705 * PARAMETERS :
4706 * @request : request from framework to process
4707 *
4708 * RETURN :
4709 *
4710 *==========================================================================*/
4711int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004712 camera3_capture_request_t *request,
4713 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004714{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004715 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 int rc = NO_ERROR;
4717 int32_t request_id;
4718 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004719 bool isVidBufRequested = false;
4720 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004721 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004722
4723 pthread_mutex_lock(&mMutex);
4724
4725 // Validate current state
4726 switch (mState) {
4727 case CONFIGURED:
4728 case STARTED:
4729 /* valid state */
4730 break;
4731
4732 case ERROR:
4733 pthread_mutex_unlock(&mMutex);
4734 handleCameraDeviceError();
4735 return -ENODEV;
4736
4737 default:
4738 LOGE("Invalid state %d", mState);
4739 pthread_mutex_unlock(&mMutex);
4740 return -ENODEV;
4741 }
4742
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004743 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004744 if (rc != NO_ERROR) {
4745 LOGE("incoming request is not valid");
4746 pthread_mutex_unlock(&mMutex);
4747 return rc;
4748 }
4749
4750 meta = request->settings;
4751
4752 // For first capture request, send capture intent, and
4753 // stream on all streams
4754 if (mState == CONFIGURED) {
4755 // send an unconfigure to the backend so that the isp
4756 // resources are deallocated
4757 if (!mFirstConfiguration) {
4758 cam_stream_size_info_t stream_config_info;
4759 int32_t hal_version = CAM_HAL_V3;
4760 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4761 stream_config_info.buffer_info.min_buffers =
4762 MIN_INFLIGHT_REQUESTS;
4763 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004764 m_bIs4KVideo ? 0 :
4765 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 clear_metadata_buffer(mParameters);
4767 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4768 CAM_INTF_PARM_HAL_VERSION, hal_version);
4769 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4770 CAM_INTF_META_STREAM_INFO, stream_config_info);
4771 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4772 mParameters);
4773 if (rc < 0) {
4774 LOGE("set_parms for unconfigure failed");
4775 pthread_mutex_unlock(&mMutex);
4776 return rc;
4777 }
4778 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004779 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004780 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004781 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004782 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 property_get("persist.camera.is_type", is_type_value, "4");
4784 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4785 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4786 property_get("persist.camera.is_type_preview", is_type_value, "4");
4787 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4788 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004789
4790 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4791 int32_t hal_version = CAM_HAL_V3;
4792 uint8_t captureIntent =
4793 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4794 mCaptureIntent = captureIntent;
4795 clear_metadata_buffer(mParameters);
4796 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4797 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4798 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004799 if (mFirstConfiguration) {
4800 // configure instant AEC
4801 // Instant AEC is a session based parameter and it is needed only
4802 // once per complete session after open camera.
4803 // i.e. This is set only once for the first capture request, after open camera.
4804 setInstantAEC(meta);
4805 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004806 uint8_t fwkVideoStabMode=0;
4807 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4808 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4809 }
4810
Xue Tuecac74e2017-04-17 13:58:15 -07004811 // Turn EIS on only for video/preview streams, and only if the EIS setprop is enabled
4812 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004813 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 int32_t vsMode;
4815 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4817 rc = BAD_VALUE;
4818 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004819 LOGD("setEis %d", setEis);
4820 bool eis3Supported = false;
4821 size_t count = IS_TYPE_MAX;
4822 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4823 for (size_t i = 0; i < count; i++) {
4824 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4825 eis3Supported = true;
4826 break;
4827 }
4828 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004829
4830 //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004831 //it could be either EIS 2.0 or EIS 3.0 depending on the stream type and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4833 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004834 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4835 is_type = isTypePreview;
4836 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4837 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4838 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004840 } else {
4841 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004843 } else {
4844 is_type = IS_TYPE_NONE;
4845 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004847 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004848 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4849 }
4850 }
4851
4852 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4853 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4854
Thierry Strudel54dc9782017-02-15 12:12:10 -08004855 //Disable tintless only if the property is set to 0
4856 memset(prop, 0, sizeof(prop));
4857 property_get("persist.camera.tintless.enable", prop, "1");
4858 int32_t tintless_value = atoi(prop);
4859
Thierry Strudel3d639192016-09-09 11:52:26 -07004860 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4861 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004862
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 //Disable CDS for HFR mode or if DIS/EIS is on.
4864 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4865 //after every configure_stream
4866 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4867 (m_bIsVideo)) {
4868 int32_t cds = CAM_CDS_MODE_OFF;
4869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4870 CAM_INTF_PARM_CDS_MODE, cds))
4871 LOGE("Failed to disable CDS for HFR mode");
4872
4873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874
4875 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4876 uint8_t* use_av_timer = NULL;
4877
4878 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004879 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 use_av_timer = &m_debug_avtimer;
4881 }
4882 else{
4883 use_av_timer =
4884 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004885 if (use_av_timer) {
4886 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4887 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004888 }
4889
4890 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4891 rc = BAD_VALUE;
4892 }
4893 }
4894
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 setMobicat();
4896
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004897 uint8_t nrMode = 0;
4898 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4899 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4900 }
4901
Thierry Strudel3d639192016-09-09 11:52:26 -07004902 /* Set fps and hfr mode while sending meta stream info so that sensor
4903 * can configure appropriate streaming mode */
4904 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004905 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4906 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004907 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4908 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004909 if (rc == NO_ERROR) {
4910 int32_t max_fps =
4911 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004912 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004913 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4914 }
4915 /* For HFR, more buffers are dequeued upfront to improve the performance */
4916 if (mBatchSize) {
4917 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4918 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4919 }
4920 }
4921 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004922 LOGE("setHalFpsRange failed");
4923 }
4924 }
4925 if (meta.exists(ANDROID_CONTROL_MODE)) {
4926 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4927 rc = extractSceneMode(meta, metaMode, mParameters);
4928 if (rc != NO_ERROR) {
4929 LOGE("extractSceneMode failed");
4930 }
4931 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004932 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004933
Thierry Strudel04e026f2016-10-10 11:27:36 -07004934 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4935 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4936 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4937 rc = setVideoHdrMode(mParameters, vhdr);
4938 if (rc != NO_ERROR) {
4939 LOGE("setVideoHDR is failed");
4940 }
4941 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004943 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004944 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004945 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004946 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4948 sensorModeFullFov)) {
4949 rc = BAD_VALUE;
4950 }
4951 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004952 //TODO: validate the arguments, HSV scenemode should have only the
4953 //advertised fps ranges
4954
4955 /*set the capture intent, hal version, tintless, stream info,
4956 *and DIS enable parameters to the backend*/
4957 LOGD("set_parms META_STREAM_INFO " );
4958 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004959 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4960 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004961 mStreamConfigInfo.type[i],
4962 mStreamConfigInfo.stream_sizes[i].width,
4963 mStreamConfigInfo.stream_sizes[i].height,
4964 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004965 mStreamConfigInfo.format[i],
4966 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4970 mParameters);
4971 if (rc < 0) {
4972 LOGE("set_parms failed for hal version, stream info");
4973 }
4974
Chien-Yu Chenee335912017-02-09 17:53:20 -08004975 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4976 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004977 if (rc != NO_ERROR) {
4978 LOGE("Failed to get sensor output size");
4979 pthread_mutex_unlock(&mMutex);
4980 goto error_exit;
4981 }
4982
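    // Map crop regions between the full active pixel array and the active
    // array of the sensor mode selected for this stream configuration.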
4983 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4984 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004985 mSensorModeInfo.active_array_size.width,
4986 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004987
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004988 {
4989 Mutex::Autolock l(gHdrPlusClientLock);
4990 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004991 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004992 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4993 if (rc != OK) {
4994 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4995 mCameraId, mSensorModeInfo.op_pixel_clk);
4996 pthread_mutex_unlock(&mMutex);
4997 goto error_exit;
4998 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004999 }
5000 }
5001
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 /* Set batchmode before initializing channel. Since registerBuffer
5003 * internally initializes some of the channels, better set batchmode
5004 * even before first register buffer */
5005 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5006 it != mStreamInfo.end(); it++) {
5007 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5008 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5009 && mBatchSize) {
5010 rc = channel->setBatchSize(mBatchSize);
5011 //Disable per frame map unmap for HFR/batchmode case
5012 rc |= channel->setPerFrameMapUnmap(false);
5013 if (NO_ERROR != rc) {
5014 LOGE("Channel init failed %d", rc);
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018 }
5019 }
5020
5021 //First initialize all streams
5022 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5023 it != mStreamInfo.end(); it++) {
5024 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005025
5026 /* Initial value of NR mode is needed before stream on */
5027 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005028 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5029 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005030 setEis) {
5031 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5032 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5033 is_type = mStreamConfigInfo.is_type[i];
5034 break;
5035 }
5036 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005038 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005039 rc = channel->initialize(IS_TYPE_NONE);
5040 }
5041 if (NO_ERROR != rc) {
5042 LOGE("Channel initialization failed %d", rc);
5043 pthread_mutex_unlock(&mMutex);
5044 goto error_exit;
5045 }
5046 }
5047
5048 if (mRawDumpChannel) {
5049 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5050 if (rc != NO_ERROR) {
5051 LOGE("Error: Raw Dump Channel init failed");
5052 pthread_mutex_unlock(&mMutex);
5053 goto error_exit;
5054 }
5055 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005056 if (mHdrPlusRawSrcChannel) {
5057 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5058 if (rc != NO_ERROR) {
5059 LOGE("Error: HDR+ RAW Source Channel init failed");
5060 pthread_mutex_unlock(&mMutex);
5061 goto error_exit;
5062 }
5063 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005064 if (mSupportChannel) {
5065 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5066 if (rc < 0) {
5067 LOGE("Support channel initialization failed");
5068 pthread_mutex_unlock(&mMutex);
5069 goto error_exit;
5070 }
5071 }
5072 if (mAnalysisChannel) {
5073 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5074 if (rc < 0) {
5075 LOGE("Analysis channel initialization failed");
5076 pthread_mutex_unlock(&mMutex);
5077 goto error_exit;
5078 }
5079 }
5080 if (mDummyBatchChannel) {
5081 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5082 if (rc < 0) {
5083 LOGE("mDummyBatchChannel setBatchSize failed");
5084 pthread_mutex_unlock(&mMutex);
5085 goto error_exit;
5086 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005087 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005088 if (rc < 0) {
5089 LOGE("mDummyBatchChannel initialization failed");
5090 pthread_mutex_unlock(&mMutex);
5091 goto error_exit;
5092 }
5093 }
5094
5095 // Set bundle info
5096 rc = setBundleInfo();
5097 if (rc < 0) {
5098 LOGE("setBundleInfo failed %d", rc);
5099 pthread_mutex_unlock(&mMutex);
5100 goto error_exit;
5101 }
5102
5103 //update settings from app here
5104 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5105 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5106 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5107 }
5108 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5109 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5110 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5111 }
5112 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5113 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5114 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5115
5116 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5117 (mLinkedCameraId != mCameraId) ) {
5118 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5119 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005120 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005121 goto error_exit;
5122 }
5123 }
5124
5125 // add bundle related cameras
5126 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5127 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005128 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5129 &m_pDualCamCmdPtr->bundle_info;
5130 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (mIsDeviceLinked)
5132 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5133 else
5134 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5135
5136 pthread_mutex_lock(&gCamLock);
5137
5138 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5139 LOGE("Dualcam: Invalid Session Id ");
5140 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005141 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 goto error_exit;
5143 }
5144
5145 if (mIsMainCamera == 1) {
5146 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5147 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005148 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005149 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 // related session id should be session id of linked session
5151 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5152 } else {
5153 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5154 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005155 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005156 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5158 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005159 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005160 pthread_mutex_unlock(&gCamLock);
5161
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005162 rc = mCameraHandle->ops->set_dual_cam_cmd(
5163 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005164 if (rc < 0) {
5165 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005166 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005167 goto error_exit;
5168 }
5169 }
5170
5171 //Then start them.
5172 LOGH("Start META Channel");
5173 rc = mMetadataChannel->start();
5174 if (rc < 0) {
5175 LOGE("META channel start failed");
5176 pthread_mutex_unlock(&mMutex);
5177 goto error_exit;
5178 }
5179
5180 if (mAnalysisChannel) {
5181 rc = mAnalysisChannel->start();
5182 if (rc < 0) {
5183 LOGE("Analysis channel start failed");
5184 mMetadataChannel->stop();
5185 pthread_mutex_unlock(&mMutex);
5186 goto error_exit;
5187 }
5188 }
5189
5190 if (mSupportChannel) {
5191 rc = mSupportChannel->start();
5192 if (rc < 0) {
5193 LOGE("Support channel start failed");
5194 mMetadataChannel->stop();
5195 /* Although support and analysis are mutually exclusive today
5196 adding it in any case for future proofing */
5197 if (mAnalysisChannel) {
5198 mAnalysisChannel->stop();
5199 }
5200 pthread_mutex_unlock(&mMutex);
5201 goto error_exit;
5202 }
5203 }
5204 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5205 it != mStreamInfo.end(); it++) {
5206 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5207 LOGH("Start Processing Channel mask=%d",
5208 channel->getStreamTypeMask());
5209 rc = channel->start();
5210 if (rc < 0) {
5211 LOGE("channel start failed");
5212 pthread_mutex_unlock(&mMutex);
5213 goto error_exit;
5214 }
5215 }
5216
5217 if (mRawDumpChannel) {
5218 LOGD("Starting raw dump stream");
5219 rc = mRawDumpChannel->start();
5220 if (rc != NO_ERROR) {
5221 LOGE("Error Starting Raw Dump Channel");
5222 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5223 it != mStreamInfo.end(); it++) {
5224 QCamera3Channel *channel =
5225 (QCamera3Channel *)(*it)->stream->priv;
5226 LOGH("Stopping Processing Channel mask=%d",
5227 channel->getStreamTypeMask());
5228 channel->stop();
5229 }
5230 if (mSupportChannel)
5231 mSupportChannel->stop();
5232 if (mAnalysisChannel) {
5233 mAnalysisChannel->stop();
5234 }
5235 mMetadataChannel->stop();
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239 }
5240
5241 if (mChannelHandle) {
5242
5243 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5244 mChannelHandle);
5245 if (rc != NO_ERROR) {
5246 LOGE("start_channel failed %d", rc);
5247 pthread_mutex_unlock(&mMutex);
5248 goto error_exit;
5249 }
5250 }
5251
5252 goto no_error;
5253error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005254 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 return rc;
5256no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005257 mWokenUpByDaemon = false;
5258 mPendingLiveRequest = 0;
5259 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 }
5261
Chien-Yu Chenee335912017-02-09 17:53:20 -08005262 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00005263 {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005264 Mutex::Autolock l(gHdrPlusClientLock);
5265 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5266 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5267 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5268 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5269 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5270 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005271 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005272 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005273 pthread_mutex_unlock(&mMutex);
5274 return rc;
5275 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005276
5277 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005278 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005279 }
5280
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005282 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005283
5284 if (mFlushPerf) {
5285 //we cannot accept any requests during flush
5286 LOGE("process_capture_request cannot proceed during flush");
5287 pthread_mutex_unlock(&mMutex);
5288 return NO_ERROR; //should return an error
5289 }
5290
5291 if (meta.exists(ANDROID_REQUEST_ID)) {
5292 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5293 mCurrentRequestId = request_id;
5294 LOGD("Received request with id: %d", request_id);
5295 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5296 LOGE("Unable to find request id field, "
5297 "& no previous id available");
5298 pthread_mutex_unlock(&mMutex);
5299 return NAME_NOT_FOUND;
5300 } else {
5301 LOGD("Re-using old request id");
5302 request_id = mCurrentRequestId;
5303 }
5304
5305 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5306 request->num_output_buffers,
5307 request->input_buffer,
5308 frameNumber);
5309 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005310 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005312 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 uint32_t snapshotStreamId = 0;
5314 for (size_t i = 0; i < request->num_output_buffers; i++) {
5315 const camera3_stream_buffer_t& output = request->output_buffers[i];
5316 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5317
Emilian Peev7650c122017-01-19 08:24:33 -08005318 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5319 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005320 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005321 blob_request = 1;
5322 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5323 }
5324
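        // Each output buffer may carry an acquire fence from the framework. Wait for
        // the fence to signal (and close its fd) before handing the buffer to the backend.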
5325 if (output.acquire_fence != -1) {
5326 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5327 close(output.acquire_fence);
5328 if (rc != OK) {
5329 LOGE("sync wait failed %d", rc);
5330 pthread_mutex_unlock(&mMutex);
5331 return rc;
5332 }
5333 }
5334
Emilian Peev0f3c3162017-03-15 12:57:46 +00005335 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5336 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005337 depthRequestPresent = true;
5338 continue;
5339 }
5340
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005341 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005342 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005343
5344 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5345 isVidBufRequested = true;
5346 }
5347 }
5348
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005349 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5350 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5351 itr++) {
5352 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5353 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5354 channel->getStreamID(channel->getStreamTypeMask());
5355
5356 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5357 isVidBufRequested = true;
5358 }
5359 }
5360
Thierry Strudel3d639192016-09-09 11:52:26 -07005361 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005362 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005363 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005364 }
5365 if (blob_request && mRawDumpChannel) {
5366 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005367 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005368 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005369 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 }
5371
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005372 {
5373 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5374 // Request a RAW buffer if
5375 // 1. mHdrPlusRawSrcChannel is valid.
5376 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate.)
5377 // 3. There is no pending HDR+ request.
5378 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5379 mHdrPlusPendingRequests.size() == 0) {
5380 streamsArray.stream_request[streamsArray.num_streams].streamID =
5381 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5382 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5383 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005384 }
5385
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005386 //extract capture intent
5387 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5388 mCaptureIntent =
5389 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5390 }
5391
5392 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5393 mCacMode =
5394 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5395 }
5396
5397 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005398 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005399
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005400 {
5401 Mutex::Autolock l(gHdrPlusClientLock);
5402 // If this request has a still capture intent, try to submit an HDR+ request.
5403 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5404 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5405 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5406 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005407 }
5408
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005409 if (hdrPlusRequest) {
5410 // For a HDR+ request, just set the frame parameters.
5411 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5412 if (rc < 0) {
5413 LOGE("fail to set frame parameters");
5414 pthread_mutex_unlock(&mMutex);
5415 return rc;
5416 }
5417 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005418 /* Parse the settings:
5419 * - For every request in NORMAL MODE
5420 * - For every request in HFR mode during preview only case
5421 * - For first request of every batch in HFR mode during video
5422 * recording. In batch mode the same settings, except the frame number,
5423 * are repeated in each request of the batch.
5424 */
5425 if (!mBatchSize ||
5426 (mBatchSize && !isVidBufRequested) ||
5427 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005428 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 if (rc < 0) {
5430 LOGE("fail to set frame parameters");
5431 pthread_mutex_unlock(&mMutex);
5432 return rc;
5433 }
5434 }
5435 /* For batchMode HFR, setFrameParameters is not called for every
5436 * request; only the frame number of the latest request is parsed.
5437 * Keep track of the first and last frame numbers in a batch so that
5438 * metadata for the frame numbers of the batch can be duplicated in
5439 * handleBatchMetadata */
5440 if (mBatchSize) {
5441 if (!mToBeQueuedVidBufs) {
5442 //start of the batch
5443 mFirstFrameNumberInBatch = request->frame_number;
5444 }
5445 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5446 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5447 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005448 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005449 return BAD_VALUE;
5450 }
5451 }
5452 if (mNeedSensorRestart) {
5453 /* Unlock the mutex as restartSensor waits on the channels to be
5454 * stopped, which in turn calls stream callback functions -
5455 * handleBufferWithLock and handleMetadataWithLock */
5456 pthread_mutex_unlock(&mMutex);
5457 rc = dynamicUpdateMetaStreamInfo();
5458 if (rc != NO_ERROR) {
5459 LOGE("Restarting the sensor failed");
5460 return BAD_VALUE;
5461 }
5462 mNeedSensorRestart = false;
5463 pthread_mutex_lock(&mMutex);
5464 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005465 if(mResetInstantAEC) {
5466 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5467 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5468 mResetInstantAEC = false;
5469 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005470 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005471 if (request->input_buffer->acquire_fence != -1) {
5472 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5473 close(request->input_buffer->acquire_fence);
5474 if (rc != OK) {
5475 LOGE("input buffer sync wait failed %d", rc);
5476 pthread_mutex_unlock(&mMutex);
5477 return rc;
5478 }
5479 }
5480 }
5481
5482 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5483 mLastCustIntentFrmNum = frameNumber;
5484 }
5485 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005486 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005487 pendingRequestIterator latestRequest;
5488 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005489 pendingRequest.num_buffers = depthRequestPresent ?
5490 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005491 pendingRequest.request_id = request_id;
5492 pendingRequest.blob_request = blob_request;
5493 pendingRequest.timestamp = 0;
5494 pendingRequest.bUrgentReceived = 0;
5495 if (request->input_buffer) {
5496 pendingRequest.input_buffer =
5497 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5498 *(pendingRequest.input_buffer) = *(request->input_buffer);
5499 pInputBuffer = pendingRequest.input_buffer;
5500 } else {
5501 pendingRequest.input_buffer = NULL;
5502 pInputBuffer = NULL;
5503 }
5504
5505 pendingRequest.pipeline_depth = 0;
5506 pendingRequest.partial_result_cnt = 0;
5507 extractJpegMetadata(mCurJpegMeta, request);
5508 pendingRequest.jpegMetadata = mCurJpegMeta;
5509 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5510 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005512 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5513 mHybridAeEnable =
5514 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5515 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005516
5517 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5518 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005519 /* DevCamDebug metadata processCaptureRequest */
5520 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5521 mDevCamDebugMetaEnable =
5522 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5523 }
5524 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5525 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005526
5527 //extract CAC info
5528 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5529 mCacMode =
5530 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5531 }
5532 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005533 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005534
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005535 // extract enableZsl info
5536 if (gExposeEnableZslKey) {
5537 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5538 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5539 mZslEnabled = pendingRequest.enableZsl;
5540 } else {
5541 pendingRequest.enableZsl = mZslEnabled;
5542 }
5543 }
5544
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 PendingBuffersInRequest bufsForCurRequest;
5546 bufsForCurRequest.frame_number = frameNumber;
5547 // Mark current timestamp for the new request
5548 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005549 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005550
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005551 if (hdrPlusRequest) {
5552 // Save settings for this request.
5553 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5554 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5555
5556 // Add to pending HDR+ request queue.
5557 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5558 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5559
5560 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5561 }
5562
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005564 if ((request->output_buffers[i].stream->data_space ==
5565 HAL_DATASPACE_DEPTH) &&
5566 (HAL_PIXEL_FORMAT_BLOB ==
5567 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005568 continue;
5569 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005570 RequestedBufferInfo requestedBuf;
5571 memset(&requestedBuf, 0, sizeof(requestedBuf));
5572 requestedBuf.stream = request->output_buffers[i].stream;
5573 requestedBuf.buffer = NULL;
5574 pendingRequest.buffers.push_back(requestedBuf);
5575
5576 // Add the buffer handle to the pending buffers list
5577 PendingBufferInfo bufferInfo;
5578 bufferInfo.buffer = request->output_buffers[i].buffer;
5579 bufferInfo.stream = request->output_buffers[i].stream;
5580 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5581 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5582 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5583 frameNumber, bufferInfo.buffer,
5584 channel->getStreamTypeMask(), bufferInfo.stream->format);
5585 }
5586 // Add this request packet into mPendingBuffersMap
5587 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5588 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5589 mPendingBuffersMap.get_num_overall_buffers());
5590
5591 latestRequest = mPendingRequestsList.insert(
5592 mPendingRequestsList.end(), pendingRequest);
5593 if(mFlush) {
5594 LOGI("mFlush is true");
5595 pthread_mutex_unlock(&mMutex);
5596 return NO_ERROR;
5597 }
5598
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005599 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5600 // channel.
5601 if (!hdrPlusRequest) {
5602 int indexUsed;
5603 // Notify metadata channel we receive a request
5604 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005605
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005606 if(request->input_buffer != NULL){
5607 LOGD("Input request, frame_number %d", frameNumber);
5608 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5609 if (NO_ERROR != rc) {
5610 LOGE("fail to set reproc parameters");
5611 pthread_mutex_unlock(&mMutex);
5612 return rc;
5613 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005614 }
5615
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005616 // Call request on other streams
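        // Dispatch every output buffer to its owning channel. BLOB (JPEG) and YUV
        // reprocess targets may additionally need a HAL metadata buffer (tracked via
        // streams_need_metadata); depth BLOB buffers are only mapped to mDepthChannel here.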
5617 uint32_t streams_need_metadata = 0;
5618 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5619 for (size_t i = 0; i < request->num_output_buffers; i++) {
5620 const camera3_stream_buffer_t& output = request->output_buffers[i];
5621 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5622
5623 if (channel == NULL) {
5624 LOGW("invalid channel pointer for stream");
5625 continue;
5626 }
5627
5628 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5629 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5630 output.buffer, request->input_buffer, frameNumber);
5631 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005632 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005633 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5634 if (rc < 0) {
5635 LOGE("Fail to request on picture channel");
5636 pthread_mutex_unlock(&mMutex);
5637 return rc;
5638 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005639 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005640 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5641 assert(NULL != mDepthChannel);
5642 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643
Emilian Peev7650c122017-01-19 08:24:33 -08005644 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5645 if (rc < 0) {
5646 LOGE("Fail to map on depth buffer");
5647 pthread_mutex_unlock(&mMutex);
5648 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005649 }
Emilian Peev7650c122017-01-19 08:24:33 -08005650 } else {
5651 LOGD("snapshot request with buffer %p, frame_number %d",
5652 output.buffer, frameNumber);
5653 if (!request->settings) {
5654 rc = channel->request(output.buffer, frameNumber,
5655 NULL, mPrevParameters, indexUsed);
5656 } else {
5657 rc = channel->request(output.buffer, frameNumber,
5658 NULL, mParameters, indexUsed);
5659 }
5660 if (rc < 0) {
5661 LOGE("Fail to request on picture channel");
5662 pthread_mutex_unlock(&mMutex);
5663 return rc;
5664 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005665
Emilian Peev7650c122017-01-19 08:24:33 -08005666 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5667 uint32_t j = 0;
5668 for (j = 0; j < streamsArray.num_streams; j++) {
5669 if (streamsArray.stream_request[j].streamID == streamId) {
5670 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5671 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5672 else
5673 streamsArray.stream_request[j].buf_index = indexUsed;
5674 break;
5675 }
5676 }
5677 if (j == streamsArray.num_streams) {
5678 LOGE("Did not find matching stream to update index");
5679 assert(0);
5680 }
5681
5682 pendingBufferIter->need_metadata = true;
5683 streams_need_metadata++;
5684 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005685 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005686 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5687 bool needMetadata = false;
5688 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5689 rc = yuvChannel->request(output.buffer, frameNumber,
5690 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5691 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005692 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005693 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005694 pthread_mutex_unlock(&mMutex);
5695 return rc;
5696 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005697
5698 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5699 uint32_t j = 0;
5700 for (j = 0; j < streamsArray.num_streams; j++) {
5701 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005702 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5703 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5704 else
5705 streamsArray.stream_request[j].buf_index = indexUsed;
5706 break;
5707 }
5708 }
5709 if (j == streamsArray.num_streams) {
5710 LOGE("Did not find matching stream to update index");
5711 assert(0);
5712 }
5713
5714 pendingBufferIter->need_metadata = needMetadata;
5715 if (needMetadata)
5716 streams_need_metadata += 1;
5717 LOGD("calling YUV channel request, need_metadata is %d",
5718 needMetadata);
5719 } else {
5720 LOGD("request with buffer %p, frame_number %d",
5721 output.buffer, frameNumber);
5722
5723 rc = channel->request(output.buffer, frameNumber, indexUsed);
5724
5725 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5726 uint32_t j = 0;
5727 for (j = 0; j < streamsArray.num_streams; j++) {
5728 if (streamsArray.stream_request[j].streamID == streamId) {
5729 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5730 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5731 else
5732 streamsArray.stream_request[j].buf_index = indexUsed;
5733 break;
5734 }
5735 }
5736 if (j == streamsArray.num_streams) {
5737 LOGE("Did not find matching stream to update index");
5738 assert(0);
5739 }
5740
5741 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5742 && mBatchSize) {
5743 mToBeQueuedVidBufs++;
5744 if (mToBeQueuedVidBufs == mBatchSize) {
5745 channel->queueBatchBuf();
5746 }
5747 }
5748 if (rc < 0) {
5749 LOGE("request failed");
5750 pthread_mutex_unlock(&mMutex);
5751 return rc;
5752 }
5753 }
5754 pendingBufferIter++;
5755 }
5756
5757 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5758 itr++) {
5759 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5760
5761 if (channel == NULL) {
5762 LOGE("invalid channel pointer for stream");
5763 assert(0);
5764 return BAD_VALUE;
5765 }
5766
5767 InternalRequest requestedStream;
5768 requestedStream = (*itr);
5769
5770
5771 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5772 LOGD("snapshot request internally input buffer %p, frame_number %d",
5773 request->input_buffer, frameNumber);
5774 if(request->input_buffer != NULL){
5775 rc = channel->request(NULL, frameNumber,
5776 pInputBuffer, &mReprocMeta, indexUsed, true,
5777 requestedStream.meteringOnly);
5778 if (rc < 0) {
5779 LOGE("Fail to request on picture channel");
5780 pthread_mutex_unlock(&mMutex);
5781 return rc;
5782 }
5783 } else {
5784 LOGD("snapshot request with frame_number %d", frameNumber);
5785 if (!request->settings) {
5786 rc = channel->request(NULL, frameNumber,
5787 NULL, mPrevParameters, indexUsed, true,
5788 requestedStream.meteringOnly);
5789 } else {
5790 rc = channel->request(NULL, frameNumber,
5791 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5792 }
5793 if (rc < 0) {
5794 LOGE("Fail to request on picture channel");
5795 pthread_mutex_unlock(&mMutex);
5796 return rc;
5797 }
5798
5799 if ((*itr).meteringOnly != 1) {
5800 requestedStream.need_metadata = 1;
5801 streams_need_metadata++;
5802 }
5803 }
5804
5805 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5806 uint32_t j = 0;
5807 for (j = 0; j < streamsArray.num_streams; j++) {
5808 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005809 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5810 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5811 else
5812 streamsArray.stream_request[j].buf_index = indexUsed;
5813 break;
5814 }
5815 }
5816 if (j == streamsArray.num_streams) {
5817 LOGE("Did not find matching stream to update index");
5818 assert(0);
5819 }
5820
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005821 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005822 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005823 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005824 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005825 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005826 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005827 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005828
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005829 //If two streams have need_metadata set to true, fail the request, unless
5830 //we copy/reference-count the metadata buffer
5831 if (streams_need_metadata > 1) {
5832 LOGE("not supporting request in which two streams requires"
5833 " 2 HAL metadata for reprocessing");
5834 pthread_mutex_unlock(&mMutex);
5835 return -EINVAL;
5836 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005837
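        // Enable PDAF (phase-detection AF) data in the backend only when this request
        // contains a depth (BLOB + HAL_DATASPACE_DEPTH) output buffer.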
Emilian Peev7650c122017-01-19 08:24:33 -08005838 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5840 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5841 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5842 pthread_mutex_unlock(&mMutex);
5843 return BAD_VALUE;
5844 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005845 if (request->input_buffer == NULL) {
5846 /* Set the parameters to backend:
5847 * - For every request in NORMAL MODE
5848 * - For every request in HFR mode during preview only case
5849 * - Once every batch in HFR mode during video recording
5850 */
5851 if (!mBatchSize ||
5852 (mBatchSize && !isVidBufRequested) ||
5853 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5854 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5855 mBatchSize, isVidBufRequested,
5856 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005857
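            // Merge this request's stream IDs into mBatchedStreamsArray (skipping
            // duplicates) so the batched set_parms below carries the union of all
            // streams requested across the batch.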
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005858 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5859 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5860 uint32_t m = 0;
5861 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5862 if (streamsArray.stream_request[k].streamID ==
5863 mBatchedStreamsArray.stream_request[m].streamID)
5864 break;
5865 }
5866 if (m == mBatchedStreamsArray.num_streams) {
5867 mBatchedStreamsArray.stream_request\
5868 [mBatchedStreamsArray.num_streams].streamID =
5869 streamsArray.stream_request[k].streamID;
5870 mBatchedStreamsArray.stream_request\
5871 [mBatchedStreamsArray.num_streams].buf_index =
5872 streamsArray.stream_request[k].buf_index;
5873 mBatchedStreamsArray.num_streams =
5874 mBatchedStreamsArray.num_streams + 1;
5875 }
5876 }
5877 streamsArray = mBatchedStreamsArray;
5878 }
5879 /* Update stream id of all the requested buffers */
5880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5881 streamsArray)) {
5882 LOGE("Failed to set stream type mask in the parameters");
                pthread_mutex_unlock(&mMutex);
5883                 return BAD_VALUE;
5884 }
5885
5886 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5887 mParameters);
5888 if (rc < 0) {
5889 LOGE("set_parms failed");
5890 }
5891 /* reset to zero because the batch has been queued */
5892 mToBeQueuedVidBufs = 0;
5893 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5894 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5895 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
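                // Batch not yet full: only accumulate this request's stream IDs into
                // mBatchedStreamsArray; set_parms is issued once the batch completes.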
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005896 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5897 uint32_t m = 0;
5898 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5899 if (streamsArray.stream_request[k].streamID ==
5900 mBatchedStreamsArray.stream_request[m].streamID)
5901 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005902 }
5903 if (m == mBatchedStreamsArray.num_streams) {
5904 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5905 streamID = streamsArray.stream_request[k].streamID;
5906 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5907 buf_index = streamsArray.stream_request[k].buf_index;
5908 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5909 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005910 }
5911 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005912 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005913 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005914 }
5915
5916 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5917
5918 mState = STARTED;
5919 // Use a timed condition wait so the HAL cannot block indefinitely
5920 struct timespec ts;
5921 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005922 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005923 if (rc < 0) {
5924 isValidTimeout = 0;
5925 LOGE("Error reading the real time clock!!");
5926 }
5927 else {
5928 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005929 int64_t timeout = 5;
5930 {
5931 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5932 // If there is a pending HDR+ request, the following requests may be blocked until the
5933 // HDR+ request is done. So allow a longer timeout.
5934 if (mHdrPlusPendingRequests.size() > 0) {
5935 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5936 }
5937 }
5938 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005939 }
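    // Throttle the caller: block until the number of in-flight requests drops below
    // mMinInFlightRequests, unless this is a reprocess (input buffer) request. The wait
    // is bounded (5s, or longer while an HDR+ capture is pending) so a stalled backend
    // surfaces as -ENODEV instead of hanging process_capture_request forever.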
5940 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005941 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005942 (mState != ERROR) && (mState != DEINIT)) {
5943 if (!isValidTimeout) {
5944 LOGD("Blocking on conditional wait");
5945 pthread_cond_wait(&mRequestCond, &mMutex);
5946 }
5947 else {
5948 LOGD("Blocking on timed conditional wait");
5949 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5950 if (rc == ETIMEDOUT) {
5951 rc = -ENODEV;
5952 LOGE("Unblocked on timeout!!!!");
5953 break;
5954 }
5955 }
5956 LOGD("Unblocked");
5957 if (mWokenUpByDaemon) {
5958 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005959 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005960 break;
5961 }
5962 }
5963 pthread_mutex_unlock(&mMutex);
5964
5965 return rc;
5966}
5967
5968/*===========================================================================
5969 * FUNCTION : dump
5970 *
5971 * DESCRIPTION: Dump the current HAL state (pending requests, pending
5972 * buffers and pending frame drops) to the given file descriptor.
5973 * PARAMETERS :
5974 * @fd : file descriptor to write the dump to
5975 *
5976 * RETURN : None
5977 *==========================================================================*/
5978void QCamera3HardwareInterface::dump(int fd)
5979{
5980 pthread_mutex_lock(&mMutex);
5981 dprintf(fd, "\n Camera HAL3 information Begin \n");
5982
5983 dprintf(fd, "\nNumber of pending requests: %zu \n",
5984 mPendingRequestsList.size());
5985 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5986 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5987 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5988 for(pendingRequestIterator i = mPendingRequestsList.begin();
5989 i != mPendingRequestsList.end(); i++) {
5990 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5991 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5992 i->input_buffer);
5993 }
5994 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5995 mPendingBuffersMap.get_num_overall_buffers());
5996 dprintf(fd, "-------+------------------\n");
5997 dprintf(fd, " Frame | Stream type mask \n");
5998 dprintf(fd, "-------+------------------\n");
5999 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6000 for(auto &j : req.mPendingBufferList) {
6001 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6002 dprintf(fd, " %5d | %11d \n",
6003 req.frame_number, channel->getStreamTypeMask());
6004 }
6005 }
6006 dprintf(fd, "-------+------------------\n");
6007
6008 dprintf(fd, "\nPending frame drop list: %zu\n",
6009 mPendingFrameDropList.size());
6010 dprintf(fd, "-------+-----------\n");
6011 dprintf(fd, " Frame | Stream ID \n");
6012 dprintf(fd, "-------+-----------\n");
6013 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6014 i != mPendingFrameDropList.end(); i++) {
6015 dprintf(fd, " %5d | %9d \n",
6016 i->frame_number, i->stream_ID);
6017 }
6018 dprintf(fd, "-------+-----------\n");
6019
6020 dprintf(fd, "\n Camera HAL3 information End \n");
6021
6022 /* use dumpsys media.camera as trigger to send update debug level event */
6023 mUpdateDebugLevel = true;
6024 pthread_mutex_unlock(&mMutex);
6025 return;
6026}
6027
6028/*===========================================================================
6029 * FUNCTION : flush
6030 *
6031 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6032 * conditionally restarts channels
6033 *
6034 * PARAMETERS :
6035 * @restartChannels: restart all channels
6036 *
6037 *
6038 * RETURN :
6039 * 0 on success
6040 * Error code on failure
6041 *==========================================================================*/
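/*
 * Illustrative sketch (not part of the build): flush() is used both for a
 * framework-initiated flush and for internal error teardown. For example,
 * handleCameraDeviceError() below invokes it without restarting channels:
 *
 *     rc = flush(false );  // stop channels, error out pending requests, no restart
 *
 * while a caller that wants the session to remain usable afterwards would pass
 * restartChannels = true so that startAllChannels()/start_channel run again.
 */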
6042int QCamera3HardwareInterface::flush(bool restartChannels)
6043{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006044 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006045 int32_t rc = NO_ERROR;
6046
6047 LOGD("Unblocking Process Capture Request");
6048 pthread_mutex_lock(&mMutex);
6049 mFlush = true;
6050 pthread_mutex_unlock(&mMutex);
6051
6052 rc = stopAllChannels();
6053 // unlink of dualcam
6054 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006055 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6056 &m_pDualCamCmdPtr->bundle_info;
6057 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006058 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6059 pthread_mutex_lock(&gCamLock);
6060
6061 if (mIsMainCamera == 1) {
6062 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6063 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006064 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 // related session id should be session id of linked session
6066 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6067 } else {
6068 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6069 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006070 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006071 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6072 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006073 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006074 pthread_mutex_unlock(&gCamLock);
6075
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006076 rc = mCameraHandle->ops->set_dual_cam_cmd(
6077 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006078 if (rc < 0) {
6079 LOGE("Dualcam: Unlink failed, but still proceed to close");
6080 }
6081 }
6082
6083 if (rc < 0) {
6084 LOGE("stopAllChannels failed");
6085 return rc;
6086 }
6087 if (mChannelHandle) {
6088 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6089 mChannelHandle);
6090 }
6091
6092 // Reset bundle info
6093 rc = setBundleInfo();
6094 if (rc < 0) {
6095 LOGE("setBundleInfo failed %d", rc);
6096 return rc;
6097 }
6098
6099 // Mutex Lock
6100 pthread_mutex_lock(&mMutex);
6101
6102 // Unblock process_capture_request
6103 mPendingLiveRequest = 0;
6104 pthread_cond_signal(&mRequestCond);
6105
6106 rc = notifyErrorForPendingRequests();
6107 if (rc < 0) {
6108 LOGE("notifyErrorForPendingRequests failed");
6109 pthread_mutex_unlock(&mMutex);
6110 return rc;
6111 }
6112
6113 mFlush = false;
6114
6115 // Start the Streams/Channels
6116 if (restartChannels) {
6117 rc = startAllChannels();
6118 if (rc < 0) {
6119 LOGE("startAllChannels failed");
6120 pthread_mutex_unlock(&mMutex);
6121 return rc;
6122 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006123 if (mChannelHandle) {
6124 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6125 mChannelHandle);
6126 if (rc < 0) {
6127 LOGE("start_channel failed");
6128 pthread_mutex_unlock(&mMutex);
6129 return rc;
6130 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006131 }
6132 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 pthread_mutex_unlock(&mMutex);
6134
6135 return 0;
6136}
6137
6138/*===========================================================================
6139 * FUNCTION : flushPerf
6140 *
6141 * DESCRIPTION: This is the performance optimization version of flush that does
6142 * not use stream off, rather flushes the system
6143 *
6144 * PARAMETERS :
6145 *
6146 *
6147 * RETURN : 0 : success
6148 * -EINVAL: input is malformed (device is not valid)
6149 * -ENODEV: if the device has encountered a serious error
6150 *==========================================================================*/
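/*
 * Note: unlike flush(), flushPerf() does not stream off the channels. It issues a
 * backend flush, waits (bounded by FLUSH_TIMEOUT) for all pending buffers to return,
 * lets each channel release snapshot resources via channel->flush(), and then errors
 * out the remaining pending requests.
 */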
6151int QCamera3HardwareInterface::flushPerf()
6152{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006153 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006154 int32_t rc = 0;
6155 struct timespec timeout;
6156 bool timed_wait = false;
6157
6158 pthread_mutex_lock(&mMutex);
6159 mFlushPerf = true;
6160 mPendingBuffersMap.numPendingBufsAtFlush =
6161 mPendingBuffersMap.get_num_overall_buffers();
6162 LOGD("Calling flush. Wait for %d buffers to return",
6163 mPendingBuffersMap.numPendingBufsAtFlush);
6164
6165 /* send the flush event to the backend */
6166 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6167 if (rc < 0) {
6168 LOGE("Error in flush: IOCTL failure");
6169 mFlushPerf = false;
6170 pthread_mutex_unlock(&mMutex);
6171 return -ENODEV;
6172 }
6173
6174 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6175 LOGD("No pending buffers in HAL, return flush");
6176 mFlushPerf = false;
6177 pthread_mutex_unlock(&mMutex);
6178 return rc;
6179 }
6180
6181 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006182 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 if (rc < 0) {
6184 LOGE("Error reading the real time clock, cannot use timed wait");
6185 } else {
6186 timeout.tv_sec += FLUSH_TIMEOUT;
6187 timed_wait = true;
6188 }
6189
6190 //Block on conditional variable
6191 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6192 LOGD("Waiting on mBuffersCond");
6193 if (!timed_wait) {
6194 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6195 if (rc != 0) {
6196 LOGE("pthread_cond_wait failed due to rc = %s",
6197 strerror(rc));
6198 break;
6199 }
6200 } else {
6201 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6202 if (rc != 0) {
6203 LOGE("pthread_cond_timedwait failed due to rc = %s",
6204 strerror(rc));
6205 break;
6206 }
6207 }
6208 }
6209 if (rc != 0) {
6210 mFlushPerf = false;
6211 pthread_mutex_unlock(&mMutex);
6212 return -ENODEV;
6213 }
6214
6215 LOGD("Received buffers, now safe to return them");
6216
6217 //make sure the channels handle flush
6218 //currently only required for the picture channel to release snapshot resources
6219 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6220 it != mStreamInfo.end(); it++) {
6221 QCamera3Channel *channel = (*it)->channel;
6222 if (channel) {
6223 rc = channel->flush();
6224 if (rc) {
6225 LOGE("Flushing the channels failed with error %d", rc);
6226 // Even though the channel flush failed, we need to continue and
6227 // return the buffers we have to the framework; however, the return
6228 // value will be an error
6229 rc = -ENODEV;
6230 }
6231 }
6232 }
6233
6234 /* notify the frameworks and send errored results */
6235 rc = notifyErrorForPendingRequests();
6236 if (rc < 0) {
6237 LOGE("notifyErrorForPendingRequests failed");
6238 pthread_mutex_unlock(&mMutex);
6239 return rc;
6240 }
6241
6242 //unblock process_capture_request
6243 mPendingLiveRequest = 0;
6244 unblockRequestIfNecessary();
6245
6246 mFlushPerf = false;
6247 pthread_mutex_unlock(&mMutex);
6248 LOGD ("Flush Operation complete. rc = %d", rc);
6249 return rc;
6250}
6251
6252/*===========================================================================
6253 * FUNCTION : handleCameraDeviceError
6254 *
6255 * DESCRIPTION: This function calls internal flush and notifies the error to
6256 * the framework and updates the state variable.
6257 *
6258 * PARAMETERS : None
6259 *
6260 * RETURN : NO_ERROR on Success
6261 * Error code on failure
6262 *==========================================================================*/
6263int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6264{
6265 int32_t rc = NO_ERROR;
6266
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006267 {
6268 Mutex::Autolock lock(mFlushLock);
6269 pthread_mutex_lock(&mMutex);
6270 if (mState != ERROR) {
6271 //if mState != ERROR, nothing to be done
6272 pthread_mutex_unlock(&mMutex);
6273 return NO_ERROR;
6274 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006275 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006276
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006277 rc = flush(false /* restart channels */);
6278 if (NO_ERROR != rc) {
6279 LOGE("internal flush to handle mState = ERROR failed");
6280 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006281
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006282 pthread_mutex_lock(&mMutex);
6283 mState = DEINIT;
6284 pthread_mutex_unlock(&mMutex);
6285 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006286
6287 camera3_notify_msg_t notify_msg;
6288 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6289 notify_msg.type = CAMERA3_MSG_ERROR;
6290 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6291 notify_msg.message.error.error_stream = NULL;
6292 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006293 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006294
6295 return rc;
6296}
6297
6298/*===========================================================================
6299 * FUNCTION : captureResultCb
6300 *
6301 * DESCRIPTION: Callback handler for all capture result
6302 * (streams, as well as metadata)
6303 *
6304 * PARAMETERS :
6305 * @metadata : metadata information
6306 * @buffer : actual gralloc buffer to be returned to the framework;
6307 * NULL if this is a metadata callback.
 * @frame_number : frame number of the request this result belongs to
 * @isInputBuffer : true if this callback is for an input (reprocess) buffer
6308 *
6309 * RETURN : NONE
6310 *==========================================================================*/
6311void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6312 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6313{
6314 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006315 pthread_mutex_lock(&mMutex);
6316 uint8_t batchSize = mBatchSize;
6317 pthread_mutex_unlock(&mMutex);
6318 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006319 handleBatchMetadata(metadata_buf,
6320 true /* free_and_bufdone_meta_buf */);
6321 } else { /* mBatchSize = 0 */
6322 hdrPlusPerfLock(metadata_buf);
6323 pthread_mutex_lock(&mMutex);
6324 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006325 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006326 true /* last urgent frame of batch metadata */,
6327 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006328 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006329 pthread_mutex_unlock(&mMutex);
6330 }
6331 } else if (isInputBuffer) {
6332 pthread_mutex_lock(&mMutex);
6333 handleInputBufferWithLock(frame_number);
6334 pthread_mutex_unlock(&mMutex);
6335 } else {
6336 pthread_mutex_lock(&mMutex);
6337 handleBufferWithLock(buffer, frame_number);
6338 pthread_mutex_unlock(&mMutex);
6339 }
6340 return;
6341}
6342
6343/*===========================================================================
6344 * FUNCTION : getReprocessibleOutputStreamId
6345 *
6346 * DESCRIPTION: Get source output stream id for the input reprocess stream
6347 * based on size and format, which would be the largest
6348 * output stream if an input stream exists.
6349 *
6350 * PARAMETERS :
6351 * @id : return the stream id if found
6352 *
6353 * RETURN : int32_t type of status
6354 * NO_ERROR -- success
6355 * non-zero failure code
6356 *==========================================================================*/
6357int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6358{
6359 /* Check whether any output or bidirectional stream has the same size and format
6360 as the input stream, and return that stream */
6361 if ((mInputStreamInfo.dim.width > 0) &&
6362 (mInputStreamInfo.dim.height > 0)) {
6363 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6364 it != mStreamInfo.end(); it++) {
6365
6366 camera3_stream_t *stream = (*it)->stream;
6367 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6368 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6369 (stream->format == mInputStreamInfo.format)) {
6370 // Usage flag for an input stream and the source output stream
6371 // may be different.
6372 LOGD("Found reprocessible output stream! %p", *it);
6373 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6374 stream->usage, mInputStreamInfo.usage);
6375
6376 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6377 if (channel != NULL && channel->mStreams[0]) {
6378 id = channel->mStreams[0]->getMyServerID();
6379 return NO_ERROR;
6380 }
6381 }
6382 }
6383 } else {
6384 LOGD("No input stream, so no reprocessible output stream");
6385 }
6386 return NAME_NOT_FOUND;
6387}
6388
6389/*===========================================================================
6390 * FUNCTION : lookupFwkName
6391 *
6392 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6393 * make sure the parameter is correctly propagated
6394 *
6395 * PARAMETERS :
6396 * @arr : map between the two enums
6397 * @len : len of the map
6398 * @hal_name : name of the hal_parm to map
6399 *
6400 * RETURN : int type of status
6401 * fwk_name -- success
6402 * non-zero failure code
6403 *==========================================================================*/
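/*
 * Illustrative sketch (hypothetical map, not a table from this file): the map entries
 * only need hal_name and fwk_name members, e.g.
 *
 *     struct ExampleMap { int hal_name; int fwk_name; };            // hypothetical
 *     static const ExampleMap kMap[] = { { 1, 10 }, { 2, 20 } };
 *     int fwk = lookupFwkName(kMap, sizeof(kMap) / sizeof(kMap[0]), 2);   // -> 20
 *     // lookupHalName() below performs the inverse fwk_name -> hal_name lookup.
 */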
6404template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6405 size_t len, halType hal_name)
6406{
6407
6408 for (size_t i = 0; i < len; i++) {
6409 if (arr[i].hal_name == hal_name) {
6410 return arr[i].fwk_name;
6411 }
6412 }
6413
6414 /* Not being able to find a matching framework type is not necessarily
6415 * an error case. This happens when mm-camera supports more attributes
6416 * than the framework does */
6417 LOGH("Cannot find matching framework type");
6418 return NAME_NOT_FOUND;
6419}
6420
6421/*===========================================================================
6422 * FUNCTION : lookupHalName
6423 *
6424 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6425 * make sure the parameter is correctly propagated
6426 *
6427 * PARAMETERS :
6428 * @arr : map between the two enums
6429 * @len : len of the map
6430 * @fwk_name : name of the fwk_parm to map
6431 *
6432 * RETURN : int32_t type of status
6433 * hal_name -- success
6434 * non-zero failure code
6435 *==========================================================================*/
6436template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6437 size_t len, fwkType fwk_name)
6438{
6439 for (size_t i = 0; i < len; i++) {
6440 if (arr[i].fwk_name == fwk_name) {
6441 return arr[i].hal_name;
6442 }
6443 }
6444
6445 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6446 return NAME_NOT_FOUND;
6447}
6448
6449/*===========================================================================
6450 * FUNCTION : lookupProp
6451 *
6452 * DESCRIPTION: lookup a value by its name
6453 *
6454 * PARAMETERS :
6455 * @arr : map between the two enums
6456 * @len : size of the map
6457 * @name : name to be looked up
6458 *
6459 * RETURN : Value if found
6460 * CAM_CDS_MODE_MAX if not found
6461 *==========================================================================*/
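/*
 * Illustrative sketch (hypothetical table; enum value assumed): entries only need
 * .desc and .val members, e.g.
 *
 *     struct CdsPropMap { const char *desc; cam_cds_mode_type_t val; };   // hypothetical
 *     static const CdsPropMap kCds[] = { { "off", CAM_CDS_MODE_OFF } };   // value assumed
 *     cam_cds_mode_type_t m = lookupProp(kCds, 1, "off");  // CAM_CDS_MODE_MAX if not found
 */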
6462template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6463 size_t len, const char *name)
6464{
6465 if (name) {
6466 for (size_t i = 0; i < len; i++) {
6467 if (!strcmp(arr[i].desc, name)) {
6468 return arr[i].val;
6469 }
6470 }
6471 }
6472 return CAM_CDS_MODE_MAX;
6473}
6474
6475/*===========================================================================
6476 * FUNCTION : translateFromHalMetadata
 *
6477 * DESCRIPTION: Translate metadata received from the HAL/backend into the
 * framework camera_metadata_t format.
6478 *
6479 * PARAMETERS :
6480 * @metadata : metadata information from callback
6481 * @timestamp: metadata buffer timestamp
6482 * @request_id: request id
6483 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006484 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006485 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6486 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006487 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006488 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6489 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006490 *
6491 * RETURN : camera_metadata_t*
6492 * metadata in a format specified by fwk
6493 *==========================================================================*/
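/*
 * Note: the body below repeatedly uses IF_META_AVAILABLE(type, var, TAG, metadata),
 * which only enters its block when the backend populated TAG in the metadata buffer;
 * each available entry is then copied into the framework CameraMetadata via
 * camMetadata.update().
 */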
6494camera_metadata_t*
6495QCamera3HardwareInterface::translateFromHalMetadata(
6496 metadata_buffer_t *metadata,
6497 nsecs_t timestamp,
6498 int32_t request_id,
6499 const CameraMetadata& jpegMetadata,
6500 uint8_t pipeline_depth,
6501 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006502 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006503 /* DevCamDebug metadata translateFromHalMetadata argument */
6504 uint8_t DevCamDebug_meta_enable,
6505 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006506 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006507 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006508 bool lastMetadataInBatch,
6509 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006510{
6511 CameraMetadata camMetadata;
6512 camera_metadata_t *resultMetadata;
6513
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006514 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006515 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6516 * Timestamp is needed because it's used for shutter notify calculation.
6517 * */
6518 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6519 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006520 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006521 }
6522
Thierry Strudel3d639192016-09-09 11:52:26 -07006523 if (jpegMetadata.entryCount())
6524 camMetadata.append(jpegMetadata);
6525
6526 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6527 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6528 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6529 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006530 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006531 if (mBatchSize == 0) {
6532 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6533 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6534 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006535
Samuel Ha68ba5172016-12-15 18:41:12 -08006536 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6537 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6538 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6539 // DevCamDebug metadata translateFromHalMetadata AF
6540 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6541 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6542 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6543 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6544 }
6545 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6546 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6547 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6548 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6549 }
6550 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6551 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6552 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6553 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6554 }
6555 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6556 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6557 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6558 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6559 }
6560 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6561 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6562 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6563 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6564 }
6565 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6566 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6567 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6568 *DevCamDebug_af_monitor_pdaf_target_pos;
6569 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6570 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6571 }
6572 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6573 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6574 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6575 *DevCamDebug_af_monitor_pdaf_confidence;
6576 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6577 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6578 }
6579 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6580 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6581 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6582 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6583 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6584 }
6585 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6586 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6587 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6588 *DevCamDebug_af_monitor_tof_target_pos;
6589 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6590 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6591 }
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6593 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6594 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6595 *DevCamDebug_af_monitor_tof_confidence;
6596 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6597 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6598 }
6599 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6600 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6601 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6602 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6603 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6604 }
6605 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6606 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6607 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6608 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6609 &fwk_DevCamDebug_af_monitor_type_select, 1);
6610 }
6611 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6612 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6613 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6614 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6615 &fwk_DevCamDebug_af_monitor_refocus, 1);
6616 }
6617 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6618 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6619 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6620 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6621 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6622 }
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6624 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6625 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6626 *DevCamDebug_af_search_pdaf_target_pos;
6627 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6628 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6629 }
6630 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6631 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6632 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6633 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6634 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6635 }
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6637 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6638 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6639 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6640 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6641 }
6642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6643 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6644 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6645 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6646 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6647 }
6648 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6649 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6650 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6651 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6652 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6653 }
6654 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6655 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6656 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6657 *DevCamDebug_af_search_tof_target_pos;
6658 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6659 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6662 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6663 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6664 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6665 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6666 }
6667 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6668 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6669 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6670 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6671 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6672 }
6673 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6674 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6675 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6676 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6677 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6678 }
6679 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6680 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6681 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6682 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6683 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6684 }
6685 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6686 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6687 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6688 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6689 &fwk_DevCamDebug_af_search_type_select, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6692 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6693 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6694 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6695 &fwk_DevCamDebug_af_search_next_pos, 1);
6696 }
6697 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6698 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6699 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6700 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6701 &fwk_DevCamDebug_af_search_target_pos, 1);
6702 }
6703 // DevCamDebug metadata translateFromHalMetadata AEC
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6705 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6706 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6707 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6708 }
6709 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6710 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6711 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6712 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6715 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6716 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6717 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6720 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6721 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6722 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6725 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6726 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6727 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6728 }
6729 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6730 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6731 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6732 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6733 }
6734 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6735 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6736 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6737 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6738 }
6739 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6740 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6741 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6742 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6743 }
Samuel Ha34229982017-02-17 13:51:11 -08006744 // DevCamDebug metadata translateFromHalMetadata zzHDR
6745 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6746 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6747 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6748 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6751 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006752 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006753 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6754 }
6755 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6756 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6757 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6758 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6761 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006762 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006763 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6764 }
6765 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6766 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6767 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6768 *DevCamDebug_aec_hdr_sensitivity_ratio;
6769 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6770 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6771 }
6772 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6773 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6774 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6775 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6776 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6777 }
6778 // DevCamDebug metadata translateFromHalMetadata ADRC
6779 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6780 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6781 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6782 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6783 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6784 }
6785 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6786 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6787 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6788 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6789 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6790 }
6791 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6792 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6793 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6794 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6795 }
6796 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6797 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6798 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6799 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6800 }
6801 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6802 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6803 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6804 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6805 }
6806 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6807 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6808 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6809 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6810 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006811 // DevCamDebug metadata translateFromHalMetadata AWB
6812 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6813 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6814 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6815 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6816 }
6817 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6818 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6819 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6820 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6821 }
6822 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6823 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6824 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6825 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6826 }
6827 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6828 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6829 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6830 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6831 }
6832 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6833 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6834 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6835 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6836 }
6837 }
6838 // atrace_end(ATRACE_TAG_ALWAYS);
6839
Thierry Strudel3d639192016-09-09 11:52:26 -07006840 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6841 int64_t fwk_frame_number = *frame_number;
6842 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6843 }
6844
6845 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6846 int32_t fps_range[2];
6847 fps_range[0] = (int32_t)float_range->min_fps;
6848 fps_range[1] = (int32_t)float_range->max_fps;
6849 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6850 fps_range, 2);
6851 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6852 fps_range[0], fps_range[1]);
6853 }
6854
6855 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6856 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6857 }
6858
6859 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6860 int val = lookupFwkName(SCENE_MODES_MAP,
6861 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6862 *sceneMode);
6863 if (NAME_NOT_FOUND != val) {
6864 uint8_t fwkSceneMode = (uint8_t)val;
6865 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6866 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6867 fwkSceneMode);
6868 }
6869 }
6870
6871 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6872 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6873 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6874 }
6875
6876 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6877 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6878 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6879 }
6880
6881 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6882 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6883 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6884 }
6885
6886 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6887 CAM_INTF_META_EDGE_MODE, metadata) {
6888 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6889 }
6890
6891 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6892 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6893 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6894 }
6895
6896 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6897 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6898 }
6899
6900 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6901 if (0 <= *flashState) {
6902 uint8_t fwk_flashState = (uint8_t) *flashState;
6903 if (!gCamCapability[mCameraId]->flash_available) {
6904 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6905 }
6906 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6907 }
6908 }
6909
6910 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6911 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6912 if (NAME_NOT_FOUND != val) {
6913 uint8_t fwk_flashMode = (uint8_t)val;
6914 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6915 }
6916 }
6917
6918 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6919 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6920 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6921 }
6922
6923 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6924 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6925 }
6926
6927 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6928 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6929 }
6930
6931 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6932 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6933 }
6934
6935 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6936 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6937 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6938 }
6939
6940 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6941 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6942 LOGD("fwk_videoStab = %d", fwk_videoStab);
6943 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6944 } else {
6945 // Regardless of whether video stabilization is supported, CTS expects the EIS
6946 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6947 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6948 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006949 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006950 }
6951
6952 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6953 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6954 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6955 }
6956
6957 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6958 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6959 }
6960
Thierry Strudel3d639192016-09-09 11:52:26 -07006961 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6962 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006963 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006964
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006965 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6966 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006967
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006968 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006969 blackLevelAppliedPattern->cam_black_level[0],
6970 blackLevelAppliedPattern->cam_black_level[1],
6971 blackLevelAppliedPattern->cam_black_level[2],
6972 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006973 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6974 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006975
6976#ifndef USE_HAL_3_3
6977 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306978 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006979 // depth space (divide by 2^4 = 16).
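        // For example, assuming an applied black level of 1024 in the 14-bit
        // pipeline, the value reported to the framework below would be
        // 1024 / 16.0 = 64.0 in 10-bit sensor units.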
Jason Lee4f3d96e2017-02-28 19:24:14 +05306980 fwk_blackLevelInd[0] /= 16.0;
6981 fwk_blackLevelInd[1] /= 16.0;
6982 fwk_blackLevelInd[2] /= 16.0;
6983 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006984 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6985 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006986#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006987 }
6988
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006989#ifndef USE_HAL_3_3
6990 // Fixed whitelevel is used by ISP/Sensor
6991 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6992 &gCamCapability[mCameraId]->white_level, 1);
6993#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006994
6995 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6996 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6997 int32_t scalerCropRegion[4];
6998 scalerCropRegion[0] = hScalerCropRegion->left;
6999 scalerCropRegion[1] = hScalerCropRegion->top;
7000 scalerCropRegion[2] = hScalerCropRegion->width;
7001 scalerCropRegion[3] = hScalerCropRegion->height;
7002
7003 // Adjust crop region from sensor output coordinate system to active
7004 // array coordinate system.
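        // Illustration only, with hypothetical numbers: for a 2096x1560 sensor
        // output mapped onto a 4192x3120 active array, a crop of
        // (100, 100, 1048, 780) would map to roughly (200, 200, 2096, 1560),
        // assuming a plain 2x scale with no offset.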
7005 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7006 scalerCropRegion[2], scalerCropRegion[3]);
7007
7008 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7009 }
7010
7011 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7012 LOGD("sensorExpTime = %lld", *sensorExpTime);
7013 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7014 }
7015
7016 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7017 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7018 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7019 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7020 }
7021
7022 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7023 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7024 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7025 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7026 sensorRollingShutterSkew, 1);
7027 }
7028
7029 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7030 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7031 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7032
7033 // Calculate the noise profile based on sensitivity
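        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel,
        // modeling the noise variance as approximately S * signal + O; the same
        // sensitivity-derived pair is replicated for every channel below.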
7034 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7035 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7036 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7037 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7038 noise_profile[i] = noise_profile_S;
7039 noise_profile[i+1] = noise_profile_O;
7040 }
7041 LOGD("noise model entry (S, O) is (%f, %f)",
7042 noise_profile_S, noise_profile_O);
7043 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7044 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7045 }
7046
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007047#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007048 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007049 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007050 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007051 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007052 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7053 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7054 }
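    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST uses 100 as unity gain, so a
    // post-stats digital gain of e.g. 1.5x would be reported as a boost of 150
    // (assuming postStatsSensitivity is a multiplicative factor).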
7055 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007056#endif
7057
Thierry Strudel3d639192016-09-09 11:52:26 -07007058 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7059 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7060 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7061 }
7062
7063 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7064 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7065 *faceDetectMode);
7066 if (NAME_NOT_FOUND != val) {
7067 uint8_t fwk_faceDetectMode = (uint8_t)val;
7068 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7069
7070 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7071 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7072 CAM_INTF_META_FACE_DETECTION, metadata) {
7073 uint8_t numFaces = MIN(
7074 faceDetectionInfo->num_faces_detected, MAX_ROI);
7075 int32_t faceIds[MAX_ROI];
7076 uint8_t faceScores[MAX_ROI];
7077 int32_t faceRectangles[MAX_ROI * 4];
7078 int32_t faceLandmarks[MAX_ROI * 6];
7079 size_t j = 0, k = 0;
7080
7081 for (size_t i = 0; i < numFaces; i++) {
7082 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7083 // Adjust crop region from sensor output coordinate system to active
7084 // array coordinate system.
7085 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7086 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7087 rect.width, rect.height);
7088
7089 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7090 faceRectangles+j, -1);
7091
Jason Lee8ce36fa2017-04-19 19:40:37 -07007092 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7093 "bottom-right (%d, %d)",
7094 faceDetectionInfo->frame_id, i,
7095 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7096 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7097
Thierry Strudel3d639192016-09-09 11:52:26 -07007098 j+= 4;
7099 }
7100 if (numFaces <= 0) {
7101 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7102 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7103 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7104 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7105 }
7106
7107 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7108 numFaces);
7109 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7110 faceRectangles, numFaces * 4U);
7111 if (fwk_faceDetectMode ==
7112 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7113 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7114 CAM_INTF_META_FACE_LANDMARK, metadata) {
7115
7116 for (size_t i = 0; i < numFaces; i++) {
7117 // Map the co-ordinate sensor output coordinate system to active
7118 // array coordinate system.
7119 mCropRegionMapper.toActiveArray(
7120 landmarks->face_landmarks[i].left_eye_center.x,
7121 landmarks->face_landmarks[i].left_eye_center.y);
7122 mCropRegionMapper.toActiveArray(
7123 landmarks->face_landmarks[i].right_eye_center.x,
7124 landmarks->face_landmarks[i].right_eye_center.y);
7125 mCropRegionMapper.toActiveArray(
7126 landmarks->face_landmarks[i].mouth_center.x,
7127 landmarks->face_landmarks[i].mouth_center.y);
7128
7129 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007130
7131 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7132 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7133 faceDetectionInfo->frame_id, i,
7134 faceLandmarks[k + LEFT_EYE_X],
7135 faceLandmarks[k + LEFT_EYE_Y],
7136 faceLandmarks[k + RIGHT_EYE_X],
7137 faceLandmarks[k + RIGHT_EYE_Y],
7138 faceLandmarks[k + MOUTH_X],
7139 faceLandmarks[k + MOUTH_Y]);
7140
Thierry Strudel04e026f2016-10-10 11:27:36 -07007141 k+= TOTAL_LANDMARK_INDICES;
7142 }
7143 } else {
7144 for (size_t i = 0; i < numFaces; i++) {
7145 setInvalidLandmarks(faceLandmarks+k);
7146 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007147 }
7148 }
7149
Jason Lee49619db2017-04-13 12:07:22 -07007150 for (size_t i = 0; i < numFaces; i++) {
7151 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7152
7153 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7154 faceDetectionInfo->frame_id, i, faceIds[i]);
7155 }
7156
Thierry Strudel3d639192016-09-09 11:52:26 -07007157 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7158 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7159 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007160 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007161 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7162 CAM_INTF_META_FACE_BLINK, metadata) {
7163 uint8_t detected[MAX_ROI];
7164 uint8_t degree[MAX_ROI * 2];
7165 for (size_t i = 0; i < numFaces; i++) {
7166 detected[i] = blinks->blink[i].blink_detected;
7167 degree[2 * i] = blinks->blink[i].left_blink;
7168 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007169
Jason Lee49619db2017-04-13 12:07:22 -07007170 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7171 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7172 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7173 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007174 }
7175 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7176 detected, numFaces);
7177 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7178 degree, numFaces * 2);
7179 }
7180 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7181 CAM_INTF_META_FACE_SMILE, metadata) {
7182 uint8_t degree[MAX_ROI];
7183 uint8_t confidence[MAX_ROI];
7184 for (size_t i = 0; i < numFaces; i++) {
7185 degree[i] = smiles->smile[i].smile_degree;
7186 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007187
Jason Lee49619db2017-04-13 12:07:22 -07007188 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7189 "smile_degree=%d, smile_score=%d",
7190 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007191 }
7192 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7193 degree, numFaces);
7194 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7195 confidence, numFaces);
7196 }
7197 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7198 CAM_INTF_META_FACE_GAZE, metadata) {
7199 int8_t angle[MAX_ROI];
7200 int32_t direction[MAX_ROI * 3];
7201 int8_t degree[MAX_ROI * 2];
7202 for (size_t i = 0; i < numFaces; i++) {
7203 angle[i] = gazes->gaze[i].gaze_angle;
7204 direction[3 * i] = gazes->gaze[i].updown_dir;
7205 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7206 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7207 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7208 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007209
7210 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7211 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7212 "left_right_gaze=%d, top_bottom_gaze=%d",
7213 faceDetectionInfo->frame_id, i, angle[i],
7214 direction[3 * i], direction[3 * i + 1],
7215 direction[3 * i + 2],
7216 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007217 }
7218 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7219 (uint8_t *)angle, numFaces);
7220 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7221 direction, numFaces * 3);
7222 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7223 (uint8_t *)degree, numFaces * 2);
7224 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007225 }
7226 }
7227 }
7228 }
7229
7230 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7231 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007232 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007233 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007234 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007235
Shuzhen Wang14415f52016-11-16 18:26:18 -08007236 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7237 histogramBins = *histBins;
7238 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7239 }
7240
7241 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007242 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7243 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007244 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007245
7246 switch (stats_data->type) {
7247 case CAM_HISTOGRAM_TYPE_BAYER:
7248 switch (stats_data->bayer_stats.data_type) {
7249 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007250 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7251 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007252 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007253 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7254 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007255 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007256 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7257 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007258 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007259 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260 case CAM_STATS_CHANNEL_R:
7261 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007262 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7263 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007264 }
7265 break;
7266 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007267 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007268 break;
7269 }
7270
Shuzhen Wang14415f52016-11-16 18:26:18 -08007271 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007272 }
7273 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 }
7275
7276 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7277 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7278 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7279 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7280 }
7281
7282 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7283 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7284 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7285 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7286 }
7287
7288 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7289 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7290 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7291 CAM_MAX_SHADING_MAP_HEIGHT);
7292 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7293 CAM_MAX_SHADING_MAP_WIDTH);
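        // The shading map carries four gain factors per grid cell (one per Bayer
        // channel), hence the 4 * map_width * map_height element count below.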
7294 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7295 lensShadingMap->lens_shading, 4U * map_width * map_height);
7296 }
7297
7298 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7299 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7300 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7301 }
7302
7303 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7304 // Populate the framework tonemap curves from CAM_INTF_META_TONEMAP_CURVES
7305 /* ch0 = G, ch1 = B, ch2 = R */
7306 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7307 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7308 tonemap->tonemap_points_cnt,
7309 CAM_MAX_TONEMAP_CURVE_SIZE);
7310 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7311 }
7312
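        // Each curve is a list of (Pin, Pout) control points, so
        // 2 * tonemap_points_cnt floats are published per color channel below.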
7313 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7314 &tonemap->curves[0].tonemap_points[0][0],
7315 tonemap->tonemap_points_cnt * 2);
7316
7317 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7318 &tonemap->curves[1].tonemap_points[0][0],
7319 tonemap->tonemap_points_cnt * 2);
7320
7321 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7322 &tonemap->curves[2].tonemap_points[0][0],
7323 tonemap->tonemap_points_cnt * 2);
7324 }
7325
7326 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7327 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7328 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7329 CC_GAIN_MAX);
7330 }
7331
7332 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7333 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7334 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7335 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7336 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7337 }
7338
7339 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7340 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7341 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7342 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7343 toneCurve->tonemap_points_cnt,
7344 CAM_MAX_TONEMAP_CURVE_SIZE);
7345 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7346 }
7347 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7348 (float*)toneCurve->curve.tonemap_points,
7349 toneCurve->tonemap_points_cnt * 2);
7350 }
7351
7352 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7353 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7354 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7355 predColorCorrectionGains->gains, 4);
7356 }
7357
7358 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7359 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7360 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7361 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7362 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7363 }
7364
7365 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7366 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7367 }
7368
7369 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7370 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7371 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7372 }
7373
7374 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7375 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7376 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7377 }
7378
7379 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7380 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7381 *effectMode);
7382 if (NAME_NOT_FOUND != val) {
7383 uint8_t fwk_effectMode = (uint8_t)val;
7384 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7385 }
7386 }
7387
7388 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7389 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7390 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7391 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7392 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7393 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7394 }
7395 int32_t fwk_testPatternData[4];
7396 fwk_testPatternData[0] = testPatternData->r;
7397 fwk_testPatternData[3] = testPatternData->b;
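        // Ordering note (assumption): the framework tag is taken to be in
        // [R, Gr, Gb, B] order; the switch below picks which of the HAL's gr/gb
        // samples lands in each green slot based on the color filter arrangement.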
7398 switch (gCamCapability[mCameraId]->color_arrangement) {
7399 case CAM_FILTER_ARRANGEMENT_RGGB:
7400 case CAM_FILTER_ARRANGEMENT_GRBG:
7401 fwk_testPatternData[1] = testPatternData->gr;
7402 fwk_testPatternData[2] = testPatternData->gb;
7403 break;
7404 case CAM_FILTER_ARRANGEMENT_GBRG:
7405 case CAM_FILTER_ARRANGEMENT_BGGR:
7406 fwk_testPatternData[2] = testPatternData->gr;
7407 fwk_testPatternData[1] = testPatternData->gb;
7408 break;
7409 default:
7410 LOGE("color arrangement %d is not supported",
7411 gCamCapability[mCameraId]->color_arrangement);
7412 break;
7413 }
7414 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7415 }
7416
7417 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7418 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7419 }
7420
7421 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7422 String8 str((const char *)gps_methods);
7423 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7424 }
7425
7426 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7427 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7428 }
7429
7430 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7431 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7432 }
7433
7434 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7435 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7436 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7437 }
7438
7439 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7440 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7441 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7442 }
7443
7444 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7445 int32_t fwk_thumb_size[2];
7446 fwk_thumb_size[0] = thumb_size->width;
7447 fwk_thumb_size[1] = thumb_size->height;
7448 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7449 }
7450
7451 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7452 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7453 privateData,
7454 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7455 }
7456
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007457 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007458 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007459 meteringMode, 1);
7460 }
7461
Thierry Strudel54dc9782017-02-15 12:12:10 -08007462 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7463 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7464 LOGD("hdr_scene_data: %d %f\n",
7465 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7466 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7467 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7468 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7469 &isHdr, 1);
7470 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7471 &isHdrConfidence, 1);
7472 }
7473
7474
7475
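    // The tuning blob below is packed as a uint32 version field and five uint32
    // size fields (sensor, VFE, CPP, CAC and mod3), followed by the variable-length
    // sensor, VFE, CPP and CAC payloads; the count passed to update() is therefore
    // expressed in uint32 words rather than bytes.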
Thierry Strudel3d639192016-09-09 11:52:26 -07007476 if (metadata->is_tuning_params_valid) {
7477 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7478 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7479 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7480
7481
7482 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7483 sizeof(uint32_t));
7484 data += sizeof(uint32_t);
7485
7486 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7487 sizeof(uint32_t));
7488 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7489 data += sizeof(uint32_t);
7490
7491 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7492 sizeof(uint32_t));
7493 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7494 data += sizeof(uint32_t);
7495
7496 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7497 sizeof(uint32_t));
7498 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7499 data += sizeof(uint32_t);
7500
7501 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7502 sizeof(uint32_t));
7503 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7504 data += sizeof(uint32_t);
7505
7506 metadata->tuning_params.tuning_mod3_data_size = 0;
7507 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7508 sizeof(uint32_t));
7509 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7510 data += sizeof(uint32_t);
7511
7512 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7513 TUNING_SENSOR_DATA_MAX);
7514 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7515 count);
7516 data += count;
7517
7518 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7519 TUNING_VFE_DATA_MAX);
7520 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7521 count);
7522 data += count;
7523
7524 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7525 TUNING_CPP_DATA_MAX);
7526 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7527 count);
7528 data += count;
7529
7530 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7531 TUNING_CAC_DATA_MAX);
7532 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7533 count);
7534 data += count;
7535
7536 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7537 (int32_t *)(void *)tuning_meta_data_blob,
7538 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7539 }
7540
7541 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7542 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7543 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7544 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7545 NEUTRAL_COL_POINTS);
7546 }
7547
7548 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7549 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7550 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7551 }
7552
7553 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7554 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7555 // Adjust crop region from sensor output coordinate system to active
7556 // array coordinate system.
7557 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7558 hAeRegions->rect.width, hAeRegions->rect.height);
7559
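        // The AE region is published as an (xmin, ymin, xmax, ymax, weight) tuple,
        // which is what convertToRegions() is assumed to produce here.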
7560 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7561 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7562 REGIONS_TUPLE_COUNT);
7563 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7564 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7565 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7566 hAeRegions->rect.height);
7567 }
7568
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007569 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7570 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7571 if (NAME_NOT_FOUND != val) {
7572 uint8_t fwkAfMode = (uint8_t)val;
7573 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7574 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7575 } else {
7576 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7577 val);
7578 }
7579 }
7580
Thierry Strudel3d639192016-09-09 11:52:26 -07007581 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7582 uint8_t fwk_afState = (uint8_t) *afState;
7583 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007584 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007585 }
7586
7587 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7588 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7589 }
7590
7591 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7592 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7593 }
7594
7595 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7596 uint8_t fwk_lensState = *lensState;
7597 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7598 }
7599
Thierry Strudel3d639192016-09-09 11:52:26 -07007600
7601 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007602 uint32_t ab_mode = *hal_ab_mode;
7603 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7604 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7605 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007607 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007608 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007609 if (NAME_NOT_FOUND != val) {
7610 uint8_t fwk_ab_mode = (uint8_t)val;
7611 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7612 }
7613 }
7614
7615 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7616 int val = lookupFwkName(SCENE_MODES_MAP,
7617 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7618 if (NAME_NOT_FOUND != val) {
7619 uint8_t fwkBestshotMode = (uint8_t)val;
7620 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7621 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7622 } else {
7623 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7624 }
7625 }
7626
7627 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7628 uint8_t fwk_mode = (uint8_t) *mode;
7629 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7630 }
7631
7632 /* Constant metadata values to be updated */
7633 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7634 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7635
7636 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7637 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7638
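    // Hot pixel map mode is reported as OFF above, so an empty map (zero entries)
    // is published here, presumably so the tag is always present in the result.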
7639 int32_t hotPixelMap[2];
7640 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7641
7642 // CDS
7643 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7644 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7645 }
7646
Thierry Strudel04e026f2016-10-10 11:27:36 -07007647 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7648 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007649 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007650 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7651 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7652 } else {
7653 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7654 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007655
7656 if(fwk_hdr != curr_hdr_state) {
7657 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7658 if(fwk_hdr)
7659 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7660 else
7661 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7662 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007663 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7664 }
7665
Thierry Strudel54dc9782017-02-15 12:12:10 -08007666 //binning correction
7667 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7668 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7669 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7670 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7671 }
7672
Thierry Strudel04e026f2016-10-10 11:27:36 -07007673 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007674 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007675 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7676 int8_t is_ir_on = 0;
7677
7678 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7679 if(is_ir_on != curr_ir_state) {
7680 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7681 if(is_ir_on)
7682 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7683 else
7684 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7685 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007686 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007687 }
7688
Thierry Strudel269c81a2016-10-12 12:13:59 -07007689 // AEC SPEED
7690 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7691 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7692 }
7693
7694 // AWB SPEED
7695 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7696 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7697 }
7698
Thierry Strudel3d639192016-09-09 11:52:26 -07007699 // TNR
7700 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7701 uint8_t tnr_enable = tnr->denoise_enable;
7702 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007703 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7704 int8_t is_tnr_on = 0;
7705
7706 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7707 if(is_tnr_on != curr_tnr_state) {
7708 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7709 if(is_tnr_on)
7710 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7711 else
7712 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7713 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007714
7715 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7716 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7717 }
7718
7719 // Reprocess crop data
7720 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7721 uint8_t cnt = crop_data->num_of_streams;
7722 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7723 // mm-qcamera-daemon only posts crop_data for streams
7724 // not linked to pproc, so the absence of valid crop metadata is not
7725 // necessarily an error case.
7726 LOGD("No valid crop metadata entries");
7727 } else {
7728 uint32_t reproc_stream_id;
7729 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7730 LOGD("No reprocessible stream found, ignore crop data");
7731 } else {
7732 int rc = NO_ERROR;
7733 Vector<int32_t> roi_map;
7734 int32_t *crop = new int32_t[cnt*4];
7735 if (NULL == crop) {
7736 rc = NO_MEMORY;
7737 }
7738 if (NO_ERROR == rc) {
7739 int32_t streams_found = 0;
7740 for (size_t i = 0; i < cnt; i++) {
7741 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7742 if (pprocDone) {
7743 // HAL already does internal reprocessing,
7744 // either via reprocessing before JPEG encoding,
7745 // or offline postprocessing for pproc bypass case.
7746 crop[0] = 0;
7747 crop[1] = 0;
7748 crop[2] = mInputStreamInfo.dim.width;
7749 crop[3] = mInputStreamInfo.dim.height;
7750 } else {
7751 crop[0] = crop_data->crop_info[i].crop.left;
7752 crop[1] = crop_data->crop_info[i].crop.top;
7753 crop[2] = crop_data->crop_info[i].crop.width;
7754 crop[3] = crop_data->crop_info[i].crop.height;
7755 }
7756 roi_map.add(crop_data->crop_info[i].roi_map.left);
7757 roi_map.add(crop_data->crop_info[i].roi_map.top);
7758 roi_map.add(crop_data->crop_info[i].roi_map.width);
7759 roi_map.add(crop_data->crop_info[i].roi_map.height);
7760 streams_found++;
7761 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7762 crop[0], crop[1], crop[2], crop[3]);
7763 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7764 crop_data->crop_info[i].roi_map.left,
7765 crop_data->crop_info[i].roi_map.top,
7766 crop_data->crop_info[i].roi_map.width,
7767 crop_data->crop_info[i].roi_map.height);
7768 break;
7769
7770 }
7771 }
7772 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7773 &streams_found, 1);
7774 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7775 crop, (size_t)(streams_found * 4));
7776 if (roi_map.array()) {
7777 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7778 roi_map.array(), roi_map.size());
7779 }
7780 }
7781 if (crop) {
7782 delete [] crop;
7783 }
7784 }
7785 }
7786 }
7787
7788 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7789 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7790 // so hardcode the CAC result to OFF mode.
7791 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7792 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7793 } else {
7794 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7795 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7796 *cacMode);
7797 if (NAME_NOT_FOUND != val) {
7798 uint8_t resultCacMode = (uint8_t)val;
7799 // Check whether the CAC result from the callback equals the framework-set CAC mode.
7800 // If not, report the CAC mode that came in the corresponding request.
7801 if (fwk_cacMode != resultCacMode) {
7802 resultCacMode = fwk_cacMode;
7803 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007804 //Check if CAC is disabled by property
7805 if (m_cacModeDisabled) {
7806 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7807 }
7808
Thierry Strudel3d639192016-09-09 11:52:26 -07007809 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7810 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7811 } else {
7812 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7813 }
7814 }
7815 }
7816
7817 // Post blob of cam_cds_data through vendor tag.
7818 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7819 uint8_t cnt = cdsInfo->num_of_streams;
7820 cam_cds_data_t cdsDataOverride;
7821 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7822 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7823 cdsDataOverride.num_of_streams = 1;
7824 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7825 uint32_t reproc_stream_id;
7826 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7827 LOGD("No reprocessible stream found, ignore cds data");
7828 } else {
7829 for (size_t i = 0; i < cnt; i++) {
7830 if (cdsInfo->cds_info[i].stream_id ==
7831 reproc_stream_id) {
7832 cdsDataOverride.cds_info[0].cds_enable =
7833 cdsInfo->cds_info[i].cds_enable;
7834 break;
7835 }
7836 }
7837 }
7838 } else {
7839 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7840 }
7841 camMetadata.update(QCAMERA3_CDS_INFO,
7842 (uint8_t *)&cdsDataOverride,
7843 sizeof(cam_cds_data_t));
7844 }
7845
7846 // Ldaf calibration data
7847 if (!mLdafCalibExist) {
7848 IF_META_AVAILABLE(uint32_t, ldafCalib,
7849 CAM_INTF_META_LDAF_EXIF, metadata) {
7850 mLdafCalibExist = true;
7851 mLdafCalib[0] = ldafCalib[0];
7852 mLdafCalib[1] = ldafCalib[1];
7853 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7854 ldafCalib[0], ldafCalib[1]);
7855 }
7856 }
7857
Thierry Strudel54dc9782017-02-15 12:12:10 -08007858 // EXIF debug data through vendor tag
7859 /*
7860 * Mobicat Mask can assume 3 values:
7861 * 1 refers to Mobicat data,
7862 * 2 refers to Stats Debug and Exif Debug Data
7863 * 3 refers to Mobicat and Stats Debug Data
7864 * We want to make sure that we are sending Exif debug data
7865 * only when Mobicat Mask is 2.
7866 */
7867 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7868 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7869 (uint8_t *)(void *)mExifParams.debug_params,
7870 sizeof(mm_jpeg_debug_exif_params_t));
7871 }
7872
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007873 // Reprocess and DDM debug data through vendor tag
7874 cam_reprocess_info_t repro_info;
7875 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007876 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7877 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007878 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007879 }
7880 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7881 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007882 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007883 }
7884 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7885 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007886 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007887 }
7888 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7889 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007890 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007891 }
7892 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7893 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007894 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007895 }
7896 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007897 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007898 }
7899 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7900 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007901 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007902 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007903 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7904 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7905 }
7906 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7907 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7908 }
7909 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7910 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007911
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007912 // INSTANT AEC MODE
7913 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7914 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7915 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7916 }
7917
Shuzhen Wange763e802016-03-31 10:24:29 -07007918 // AF scene change
7919 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7920 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7921 }
7922
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007923 // Enable ZSL
7924 if (enableZsl != nullptr) {
7925 uint8_t value = *enableZsl ?
7926 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7927 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7928 }
7929
Thierry Strudel3d639192016-09-09 11:52:26 -07007930 resultMetadata = camMetadata.release();
7931 return resultMetadata;
7932}
7933
7934/*===========================================================================
7935 * FUNCTION : saveExifParams
7936 *
7937 * DESCRIPTION: Cache EXIF debug parameters (3A, stats, histogram and tuning) from the
7938 *              metadata callback
7938 *
7939 * PARAMETERS :
7940 * @metadata : metadata information from callback
7941 *
7942 * RETURN : none
7943 *
7944 *==========================================================================*/
7945void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7946{
7947 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7948 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7949 if (mExifParams.debug_params) {
7950 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7951 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7952 }
7953 }
7954 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7955 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7956 if (mExifParams.debug_params) {
7957 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7958 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7959 }
7960 }
7961 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7962 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7963 if (mExifParams.debug_params) {
7964 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7965 mExifParams.debug_params->af_debug_params_valid = TRUE;
7966 }
7967 }
7968 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7969 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7970 if (mExifParams.debug_params) {
7971 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7972 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7973 }
7974 }
7975 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7976 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7977 if (mExifParams.debug_params) {
7978 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7979 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7980 }
7981 }
7982 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7983 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7984 if (mExifParams.debug_params) {
7985 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7986 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7987 }
7988 }
7989 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7990 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7991 if (mExifParams.debug_params) {
7992 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7993 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7994 }
7995 }
7996 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7997 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7998 if (mExifParams.debug_params) {
7999 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8000 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8001 }
8002 }
8003}
8004
8005/*===========================================================================
8006 * FUNCTION : get3AExifParams
8007 *
8008 * DESCRIPTION:
8009 * DESCRIPTION: Return the cached EXIF parameters, including 3A and debug data
8010 * PARAMETERS : none
8011 *
8012 *
8013 * RETURN : mm_jpeg_exif_params_t
8014 *
8015 *==========================================================================*/
8016mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8017{
8018 return mExifParams;
8019}
8020
8021/*===========================================================================
8022 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8023 *
8024 * DESCRIPTION:
8025 * DESCRIPTION: Translate urgent (partial result) metadata from the backend callback into
8026 *              framework result metadata
8026 * PARAMETERS :
8027 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008028 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8029 * urgent metadata in a batch. Always true for
8030 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008031 *
8032 * RETURN : camera_metadata_t*
8033 * metadata in a format specified by fwk
8034 *==========================================================================*/
8035camera_metadata_t*
8036QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008037 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008038{
8039 CameraMetadata camMetadata;
8040 camera_metadata_t *resultMetadata;
8041
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008042 if (!lastUrgentMetadataInBatch) {
8043 /* In batch mode, use empty metadata if this is not the last in batch
8044 */
8045 resultMetadata = allocate_camera_metadata(0, 0);
8046 return resultMetadata;
8047 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008048
8049 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8050 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8051 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8052 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8053 }
8054
8055 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8056 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8057 &aecTrigger->trigger, 1);
8058 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8059 &aecTrigger->trigger_id, 1);
8060 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8061 aecTrigger->trigger);
8062 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8063 aecTrigger->trigger_id);
8064 }
8065
8066 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8067 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8068 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8069 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8070 }
8071
Thierry Strudel3d639192016-09-09 11:52:26 -07008072 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8073 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8074 &af_trigger->trigger, 1);
8075 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8076 af_trigger->trigger);
8077 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8078 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8079 af_trigger->trigger_id);
8080 }
8081
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008082 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8083 /*af regions*/
8084 int32_t afRegions[REGIONS_TUPLE_COUNT];
8085        // Adjust AF region from sensor output coordinate system to active
8086        // array coordinate system.
8087 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8088 hAfRegions->rect.width, hAfRegions->rect.height);
8089
8090 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8091 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8092 REGIONS_TUPLE_COUNT);
8093 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8094 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8095 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8096 hAfRegions->rect.height);
8097 }
8098
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008099 // AF region confidence
8100 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8101 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8102 }
8103
Thierry Strudel3d639192016-09-09 11:52:26 -07008104 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8105 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8106 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8107 if (NAME_NOT_FOUND != val) {
8108 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8109 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8110 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8111 } else {
8112 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8113 }
8114 }
8115
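    // Derive ANDROID_CONTROL_AE_MODE from the backend's redeye reduction, LED/flash
    // mode and AEC mode values.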
8116 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8117 uint32_t aeMode = CAM_AE_MODE_MAX;
8118 int32_t flashMode = CAM_FLASH_MODE_MAX;
8119 int32_t redeye = -1;
8120 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8121 aeMode = *pAeMode;
8122 }
8123 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8124 flashMode = *pFlashMode;
8125 }
8126 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8127 redeye = *pRedeye;
8128 }
8129
8130 if (1 == redeye) {
8131 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8132 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8133 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8134 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8135 flashMode);
8136 if (NAME_NOT_FOUND != val) {
8137 fwk_aeMode = (uint8_t)val;
8138 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8139 } else {
8140 LOGE("Unsupported flash mode %d", flashMode);
8141 }
8142 } else if (aeMode == CAM_AE_MODE_ON) {
8143 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8144 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8145 } else if (aeMode == CAM_AE_MODE_OFF) {
8146 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8147 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008148 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8149 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8150 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008151 } else {
8152 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8153 "flashMode:%d, aeMode:%u!!!",
8154 redeye, flashMode, aeMode);
8155 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008156 if (mInstantAEC) {
8157        // Increment frame index count until the bound is reached for instant AEC.
8158 mInstantAecFrameIdxCount++;
8159 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8160 CAM_INTF_META_AEC_INFO, metadata) {
8161 LOGH("ae_params->settled = %d",ae_params->settled);
8162            // If AEC has settled, or the number of frames has reached the bound,
8163            // reset instant AEC.
8164 if (ae_params->settled ||
8165 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8166 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8167 mInstantAEC = false;
8168 mResetInstantAEC = true;
8169 mInstantAecFrameIdxCount = 0;
8170 }
8171 }
8172 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008173 resultMetadata = camMetadata.release();
8174 return resultMetadata;
8175}
8176
8177/*===========================================================================
8178 * FUNCTION : dumpMetadataToFile
8179 *
8180 * DESCRIPTION: Dumps tuning metadata to file system
8181 *
8182 * PARAMETERS :
8183 * @meta : tuning metadata
8184 * @dumpFrameCount : current dump frame count
8185 * @enabled : whether tuning metadata dumping is enabled
 * @type : tag string identifying the dump source, used in the dump file name
 * @frameNumber : frame number used in the dump file name
8186 *
8187 *==========================================================================*/
8188void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8189 uint32_t &dumpFrameCount,
8190 bool enabled,
8191 const char *type,
8192 uint32_t frameNumber)
8193{
8194 //Some sanity checks
8195 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8196 LOGE("Tuning sensor data size bigger than expected %d: %d",
8197 meta.tuning_sensor_data_size,
8198 TUNING_SENSOR_DATA_MAX);
8199 return;
8200 }
8201
8202 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8203 LOGE("Tuning VFE data size bigger than expected %d: %d",
8204 meta.tuning_vfe_data_size,
8205 TUNING_VFE_DATA_MAX);
8206 return;
8207 }
8208
8209 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8210 LOGE("Tuning CPP data size bigger than expected %d: %d",
8211 meta.tuning_cpp_data_size,
8212 TUNING_CPP_DATA_MAX);
8213 return;
8214 }
8215
8216 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8217 LOGE("Tuning CAC data size bigger than expected %d: %d",
8218 meta.tuning_cac_data_size,
8219 TUNING_CAC_DATA_MAX);
8220 return;
8221 }
8222 //
8223
8224 if(enabled){
8225 char timeBuf[FILENAME_MAX];
8226 char buf[FILENAME_MAX];
8227 memset(buf, 0, sizeof(buf));
8228 memset(timeBuf, 0, sizeof(timeBuf));
8229 time_t current_time;
8230 struct tm * timeinfo;
8231 time (&current_time);
8232 timeinfo = localtime (&current_time);
8233 if (timeinfo != NULL) {
8234 strftime (timeBuf, sizeof(timeBuf),
8235 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8236 }
8237 String8 filePath(timeBuf);
8238 snprintf(buf,
8239 sizeof(buf),
8240 "%dm_%s_%d.bin",
8241 dumpFrameCount,
8242 type,
8243 frameNumber);
8244 filePath.append(buf);
8245 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8246 if (file_fd >= 0) {
8247 ssize_t written_len = 0;
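            // Dump layout: tuning data version, the five section-size words
            // (sensor/VFE/CPP/CAC/mod3), then the sensor, VFE, CPP and CAC payloads
            // copied from their fixed offsets in meta.data.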
8248 meta.tuning_data_version = TUNING_DATA_VERSION;
8249 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8250 written_len += write(file_fd, data, sizeof(uint32_t));
8251 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8252 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8253 written_len += write(file_fd, data, sizeof(uint32_t));
8254 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8255 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8256 written_len += write(file_fd, data, sizeof(uint32_t));
8257 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8258 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8259 written_len += write(file_fd, data, sizeof(uint32_t));
8260 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8261 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8262 written_len += write(file_fd, data, sizeof(uint32_t));
8263 meta.tuning_mod3_data_size = 0;
8264 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8265 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8266 written_len += write(file_fd, data, sizeof(uint32_t));
8267 size_t total_size = meta.tuning_sensor_data_size;
8268 data = (void *)((uint8_t *)&meta.data);
8269 written_len += write(file_fd, data, total_size);
8270 total_size = meta.tuning_vfe_data_size;
8271 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8272 written_len += write(file_fd, data, total_size);
8273 total_size = meta.tuning_cpp_data_size;
8274 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8275 written_len += write(file_fd, data, total_size);
8276 total_size = meta.tuning_cac_data_size;
8277 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8278 written_len += write(file_fd, data, total_size);
8279 close(file_fd);
8280 }else {
8281 LOGE("fail to open file for metadata dumping");
8282 }
8283 }
8284}
8285
8286/*===========================================================================
8287 * FUNCTION : cleanAndSortStreamInfo
8288 *
8289 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8290 *              and sort them such that the raw stream is at the end of the list.
8291 *              This is a workaround for a camera daemon constraint.
8292 *
8293 * PARAMETERS : None
8294 *
8295 *==========================================================================*/
8296void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8297{
8298 List<stream_info_t *> newStreamInfo;
8299
8300 /*clean up invalid streams*/
8301 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8302 it != mStreamInfo.end();) {
8303 if(((*it)->status) == INVALID){
8304 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8305 delete channel;
8306 free(*it);
8307 it = mStreamInfo.erase(it);
8308 } else {
8309 it++;
8310 }
8311 }
8312
8313 // Move preview/video/callback/snapshot streams into newList
8314 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8315 it != mStreamInfo.end();) {
8316 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8317 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8318 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8319 newStreamInfo.push_back(*it);
8320 it = mStreamInfo.erase(it);
8321 } else
8322 it++;
8323 }
8324 // Move raw streams into newList
8325 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8326 it != mStreamInfo.end();) {
8327 newStreamInfo.push_back(*it);
8328 it = mStreamInfo.erase(it);
8329 }
8330
8331 mStreamInfo = newStreamInfo;
8332}
8333
8334/*===========================================================================
8335 * FUNCTION : extractJpegMetadata
8336 *
8337 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8338 *              JPEG metadata is cached in HAL, and returned as part of the capture
8339 * result when metadata is returned from camera daemon.
8340 *
8341 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8342 * @request: capture request
8343 *
8344 *==========================================================================*/
8345void QCamera3HardwareInterface::extractJpegMetadata(
8346 CameraMetadata& jpegMetadata,
8347 const camera3_capture_request_t *request)
8348{
8349 CameraMetadata frame_settings;
8350 frame_settings = request->settings;
8351
8352 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8353 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8354 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8355 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8356
8357 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8358 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8359 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8360 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8361
8362 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8363 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8364 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8365 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8366
8367 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8368 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8369 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8370 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8371
8372 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8373 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8374 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8375 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8376
8377 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8378 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8379 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8380 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8381
8382 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8383 int32_t thumbnail_size[2];
8384 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8385 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8386 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8387 int32_t orientation =
8388 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008389 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008390 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8391 int32_t temp;
8392 temp = thumbnail_size[0];
8393 thumbnail_size[0] = thumbnail_size[1];
8394 thumbnail_size[1] = temp;
8395 }
8396 }
8397 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8398 thumbnail_size,
8399 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8400 }
8401
8402}
8403
8404/*===========================================================================
8405 * FUNCTION : convertToRegions
8406 *
8407 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8408 *
8409 * PARAMETERS :
8410 * @rect : cam_rect_t struct to convert
8411 * @region : int32_t destination array
8412 * @weight : if we are converting from cam_area_t, weight is valid
8413 * else weight = -1
8414 *
8415 *==========================================================================*/
8416void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8417 int32_t *region, int weight)
8418{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008419 region[FACE_LEFT] = rect.left;
8420 region[FACE_TOP] = rect.top;
8421 region[FACE_RIGHT] = rect.left + rect.width;
8422 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008423 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008424 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008425 }
8426}
8427
8428/*===========================================================================
8429 * FUNCTION : convertFromRegions
8430 *
8431 * DESCRIPTION: helper method to convert a framework region tag into cam_area_t
8432 *
8433 * PARAMETERS :
8434 *   @roi : cam_area_t struct to populate
8435 *   @frame_settings : capture request settings containing the region tag
8436 *   @tag : metadata tag of the region to convert
8437 *          (array layout: x_min, y_min, x_max, y_max, weight)
8438 *
8439 *==========================================================================*/
8440void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008441 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008442{
Thierry Strudel3d639192016-09-09 11:52:26 -07008443 int32_t x_min = frame_settings.find(tag).data.i32[0];
8444 int32_t y_min = frame_settings.find(tag).data.i32[1];
8445 int32_t x_max = frame_settings.find(tag).data.i32[2];
8446 int32_t y_max = frame_settings.find(tag).data.i32[3];
8447 roi.weight = frame_settings.find(tag).data.i32[4];
8448 roi.rect.left = x_min;
8449 roi.rect.top = y_min;
8450 roi.rect.width = x_max - x_min;
8451 roi.rect.height = y_max - y_min;
8452}
8453
8454/*===========================================================================
8455 * FUNCTION : resetIfNeededROI
8456 *
8457 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8458 * crop region
8459 *
8460 * PARAMETERS :
8461 * @roi : cam_area_t struct to resize
8462 * @scalerCropRegion : cam_crop_region_t region to compare against
8463 *
8464 *
8465 *==========================================================================*/
8466bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8467 const cam_crop_region_t* scalerCropRegion)
8468{
8469 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8470 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8471 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8472 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8473
8474    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8475     * Without this check, the validation below (whether the roi lies inside the
8476     * scaler crop region) would fail, the roi would not be reset, and the
8477     * algorithm would continue to use a stale roi window.
8478     */
8479 if (roi->weight == 0) {
8480 return true;
8481 }
8482
8483 if ((roi_x_max < scalerCropRegion->left) ||
8484        // right edge of roi window is left of scaler crop's left edge
8485 (roi_y_max < scalerCropRegion->top) ||
8486        // bottom edge of roi window is above scaler crop's top edge
8487 (roi->rect.left > crop_x_max) ||
8488        // left edge of roi window is beyond (to the right of) scaler crop's right edge
8489 (roi->rect.top > crop_y_max)){
8490        // top edge of roi window is below scaler crop's bottom edge
8491 return false;
8492 }
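    // ROI overlaps the scaler crop region: clamp each edge so the ROI lies fully inside it.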
8493 if (roi->rect.left < scalerCropRegion->left) {
8494 roi->rect.left = scalerCropRegion->left;
8495 }
8496 if (roi->rect.top < scalerCropRegion->top) {
8497 roi->rect.top = scalerCropRegion->top;
8498 }
8499 if (roi_x_max > crop_x_max) {
8500 roi_x_max = crop_x_max;
8501 }
8502 if (roi_y_max > crop_y_max) {
8503 roi_y_max = crop_y_max;
8504 }
8505 roi->rect.width = roi_x_max - roi->rect.left;
8506 roi->rect.height = roi_y_max - roi->rect.top;
8507 return true;
8508}
8509
8510/*===========================================================================
8511 * FUNCTION : convertLandmarks
8512 *
8513 * DESCRIPTION: helper method to extract the landmarks from face detection info
8514 *
8515 * PARAMETERS :
8516 * @landmark_data : input landmark data to be converted
8517 * @landmarks : int32_t destination array
8518 *
8519 *
8520 *==========================================================================*/
8521void QCamera3HardwareInterface::convertLandmarks(
8522 cam_face_landmarks_info_t landmark_data,
8523 int32_t *landmarks)
8524{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008525 if (landmark_data.is_left_eye_valid) {
8526 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8527 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8528 } else {
8529 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8530 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8531 }
8532
8533 if (landmark_data.is_right_eye_valid) {
8534 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8535 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8536 } else {
8537 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8538 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8539 }
8540
8541 if (landmark_data.is_mouth_valid) {
8542 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8543 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8544 } else {
8545 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8546 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8547 }
8548}
8549
8550/*===========================================================================
8551 * FUNCTION : setInvalidLandmarks
8552 *
8553 * DESCRIPTION: helper method to set invalid landmarks
8554 *
8555 * PARAMETERS :
8556 * @landmarks : int32_t destination array
8557 *
8558 *
8559 *==========================================================================*/
8560void QCamera3HardwareInterface::setInvalidLandmarks(
8561 int32_t *landmarks)
8562{
8563 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8564 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8565 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8566 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8567 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8568 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008569}
8570
8571#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008572
8573/*===========================================================================
8574 * FUNCTION : getCapabilities
8575 *
8576 * DESCRIPTION: query camera capability from back-end
8577 *
8578 * PARAMETERS :
8579 * @ops : mm-interface ops structure
8580 * @cam_handle : camera handle for which we need capability
8581 *
8582 * RETURN : ptr type of capability structure
8583 * capability for success
8584 * NULL for failure
8585 *==========================================================================*/
8586cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8587 uint32_t cam_handle)
8588{
8589 int rc = NO_ERROR;
8590 QCamera3HeapMemory *capabilityHeap = NULL;
8591 cam_capability_t *cap_ptr = NULL;
8592
8593 if (ops == NULL) {
8594 LOGE("Invalid arguments");
8595 return NULL;
8596 }
8597
8598 capabilityHeap = new QCamera3HeapMemory(1);
8599 if (capabilityHeap == NULL) {
8600 LOGE("creation of capabilityHeap failed");
8601 return NULL;
8602 }
8603
8604 /* Allocate memory for capability buffer */
8605 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8606 if(rc != OK) {
8607        LOGE("No memory for capability");
8608 goto allocate_failed;
8609 }
8610
8611 /* Map memory for capability buffer */
8612 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8613
8614 rc = ops->map_buf(cam_handle,
8615 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8616 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8617 if(rc < 0) {
8618 LOGE("failed to map capability buffer");
8619 rc = FAILED_TRANSACTION;
8620 goto map_failed;
8621 }
8622
8623 /* Query Capability */
8624 rc = ops->query_capability(cam_handle);
8625 if(rc < 0) {
8626 LOGE("failed to query capability");
8627 rc = FAILED_TRANSACTION;
8628 goto query_failed;
8629 }
8630
8631 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8632 if (cap_ptr == NULL) {
8633 LOGE("out of memory");
8634 rc = NO_MEMORY;
8635 goto query_failed;
8636 }
8637
8638 memset(cap_ptr, 0, sizeof(cam_capability_t));
8639 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8640
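    // Zero out the analysis stream padding offsets in the returned copy.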
8641 int index;
8642 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8643 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8644 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8645 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8646 }
8647
8648query_failed:
8649 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8650map_failed:
8651 capabilityHeap->deallocate();
8652allocate_failed:
8653 delete capabilityHeap;
8654
8655 if (rc != NO_ERROR) {
8656 return NULL;
8657 } else {
8658 return cap_ptr;
8659 }
8660}
8661
Thierry Strudel3d639192016-09-09 11:52:26 -07008662/*===========================================================================
8663 * FUNCTION : initCapabilities
8664 *
8665 * DESCRIPTION: initialize camera capabilities in static data struct
8666 *
8667 * PARAMETERS :
8668 * @cameraId : camera Id
8669 *
8670 * RETURN : int32_t type of status
8671 * NO_ERROR -- success
8672 * none-zero failure code
8673 *==========================================================================*/
8674int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8675{
8676 int rc = 0;
8677 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008678 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008679
8680 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8681 if (rc) {
8682 LOGE("camera_open failed. rc = %d", rc);
8683 goto open_failed;
8684 }
8685 if (!cameraHandle) {
8686 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8687 goto open_failed;
8688 }
8689
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008690 handle = get_main_camera_handle(cameraHandle->camera_handle);
8691 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8692 if (gCamCapability[cameraId] == NULL) {
8693 rc = FAILED_TRANSACTION;
8694 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008695 }
8696
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008697 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008698 if (is_dual_camera_by_idx(cameraId)) {
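        // For dual camera, also query and cache the aux camera's capabilities.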
8699 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8700 gCamCapability[cameraId]->aux_cam_cap =
8701 getCapabilities(cameraHandle->ops, handle);
8702 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8703 rc = FAILED_TRANSACTION;
8704 free(gCamCapability[cameraId]);
8705 goto failed_op;
8706 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008707
8708 // Copy the main camera capability to main_cam_cap struct
8709 gCamCapability[cameraId]->main_cam_cap =
8710 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8711 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8712 LOGE("out of memory");
8713 rc = NO_MEMORY;
8714 goto failed_op;
8715 }
8716 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8717 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008718 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008719failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008720 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8721 cameraHandle = NULL;
8722open_failed:
8723 return rc;
8724}
8725
8726/*==========================================================================
8727 * FUNCTION : get3Aversion
8728 * FUNCTION : get3AVersion
8729 * DESCRIPTION: get the Q3A S/W version
8730 *
8731 * PARAMETERS :
8732 * @sw_version: Reference of Q3A structure which will hold version info upon
8733 * return
8734 *
8735 * RETURN : None
8736 *
8737 *==========================================================================*/
8738void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8739{
8740 if(gCamCapability[mCameraId])
8741 sw_version = gCamCapability[mCameraId]->q3a_version;
8742 else
8743 LOGE("Capability structure NULL!");
8744}
8745
8746
8747/*===========================================================================
8748 * FUNCTION : initParameters
8749 *
8750 * DESCRIPTION: initialize camera parameters
8751 *
8752 * PARAMETERS :
8753 *
8754 * RETURN : int32_t type of status
8755 * NO_ERROR -- success
8756 * none-zero failure code
8757 *==========================================================================*/
8758int QCamera3HardwareInterface::initParameters()
8759{
8760 int rc = 0;
8761
8762 //Allocate Set Param Buffer
8763 mParamHeap = new QCamera3HeapMemory(1);
8764 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8765 if(rc != OK) {
8766 rc = NO_MEMORY;
8767 LOGE("Failed to allocate SETPARM Heap memory");
8768 delete mParamHeap;
8769 mParamHeap = NULL;
8770 return rc;
8771 }
8772
8773 //Map memory for parameters buffer
8774 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8775 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8776 mParamHeap->getFd(0),
8777 sizeof(metadata_buffer_t),
8778 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8779 if(rc < 0) {
8780 LOGE("failed to map SETPARM buffer");
8781 rc = FAILED_TRANSACTION;
8782 mParamHeap->deallocate();
8783 delete mParamHeap;
8784 mParamHeap = NULL;
8785 return rc;
8786 }
8787
8788 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8789
8790 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8791 return rc;
8792}
8793
8794/*===========================================================================
8795 * FUNCTION : deinitParameters
8796 *
8797 * DESCRIPTION: de-initialize camera parameters
8798 *
8799 * PARAMETERS :
8800 *
8801 * RETURN : NONE
8802 *==========================================================================*/
8803void QCamera3HardwareInterface::deinitParameters()
8804{
8805 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8806 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8807
8808 mParamHeap->deallocate();
8809 delete mParamHeap;
8810 mParamHeap = NULL;
8811
8812 mParameters = NULL;
8813
8814 free(mPrevParameters);
8815 mPrevParameters = NULL;
8816}
8817
8818/*===========================================================================
8819 * FUNCTION : calcMaxJpegSize
8820 *
8821 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8822 *
8823 * PARAMETERS :
8824 *
8825 * RETURN : max_jpeg_size
8826 *==========================================================================*/
8827size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8828{
8829 size_t max_jpeg_size = 0;
8830 size_t temp_width, temp_height;
8831 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8832 MAX_SIZES_CNT);
8833 for (size_t i = 0; i < count; i++) {
8834 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8835 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8836 if (temp_width * temp_height > max_jpeg_size ) {
8837 max_jpeg_size = temp_width * temp_height;
8838 }
8839 }
8840 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8841 return max_jpeg_size;
8842}
8843
8844/*===========================================================================
8845 * FUNCTION : getMaxRawSize
8846 *
8847 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8848 *
8849 * PARAMETERS :
8850 *
8851 * RETURN : Largest supported Raw Dimension
8852 *==========================================================================*/
8853cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8854{
8855 int max_width = 0;
8856 cam_dimension_t maxRawSize;
8857
8858 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8859 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8860 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8861 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8862 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8863 }
8864 }
8865 return maxRawSize;
8866}
8867
8868
8869/*===========================================================================
8870 * FUNCTION : calcMaxJpegDim
8871 *
8872 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8873 *
8874 * PARAMETERS :
8875 *
8876 * RETURN : max_jpeg_dim
8877 *==========================================================================*/
8878cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8879{
8880 cam_dimension_t max_jpeg_dim;
8881 cam_dimension_t curr_jpeg_dim;
8882 max_jpeg_dim.width = 0;
8883 max_jpeg_dim.height = 0;
8884 curr_jpeg_dim.width = 0;
8885 curr_jpeg_dim.height = 0;
8886 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8887 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8888 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8889 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8890 max_jpeg_dim.width * max_jpeg_dim.height ) {
8891 max_jpeg_dim.width = curr_jpeg_dim.width;
8892 max_jpeg_dim.height = curr_jpeg_dim.height;
8893 }
8894 }
8895 return max_jpeg_dim;
8896}
8897
8898/*===========================================================================
8899 * FUNCTION : addStreamConfig
8900 *
8901 * DESCRIPTION: adds the stream configuration to the array
8902 *
8903 * PARAMETERS :
8904 * @available_stream_configs : pointer to stream configuration array
8905 * @scalar_format : scalar format
8906 * @dim : configuration dimension
8907 * @config_type : input or output configuration type
8908 *
8909 * RETURN : NONE
8910 *==========================================================================*/
8911void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8912 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8913{
8914 available_stream_configs.add(scalar_format);
8915 available_stream_configs.add(dim.width);
8916 available_stream_configs.add(dim.height);
8917 available_stream_configs.add(config_type);
8918}
8919
8920/*===========================================================================
8921 * FUNCTION : suppportBurstCapture
8922 * FUNCTION : supportBurstCapture
8923 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8924 *
8925 * PARAMETERS :
8926 * @cameraId : camera Id
8927 *
8928 * RETURN : true if camera supports BURST_CAPTURE
8929 * false otherwise
8930 *==========================================================================*/
8931bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8932{
8933 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8934 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8935 const int32_t highResWidth = 3264;
8936 const int32_t highResHeight = 2448;
8937
8938 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8939 // Maximum resolution images cannot be captured at >= 10fps
8940 // -> not supporting BURST_CAPTURE
8941 return false;
8942 }
8943
8944 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8945 // Maximum resolution images can be captured at >= 20fps
8946 // --> supporting BURST_CAPTURE
8947 return true;
8948 }
8949
8950 // Find the smallest highRes resolution, or largest resolution if there is none
8951 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8952 MAX_SIZES_CNT);
8953 size_t highRes = 0;
8954 while ((highRes + 1 < totalCnt) &&
8955 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8956 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8957 highResWidth * highResHeight)) {
8958 highRes++;
8959 }
8960 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8961 return true;
8962 } else {
8963 return false;
8964 }
8965}
8966
8967/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008968 * FUNCTION : getPDStatIndex
8969 *
8970 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8971 *
8972 * PARAMETERS :
8973 * @caps : camera capabilities
8974 *
8975 * RETURN : int32_t type
8976 * non-negative - on success
8977 * -1 - on failure
8978 *==========================================================================*/
8979int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8980 if (nullptr == caps) {
8981 return -1;
8982 }
8983
8984 uint32_t metaRawCount = caps->meta_raw_channel_count;
8985 int32_t ret = -1;
8986 for (size_t i = 0; i < metaRawCount; i++) {
8987 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8988 ret = i;
8989 break;
8990 }
8991 }
8992
8993 return ret;
8994}
8995
8996/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008997 * FUNCTION : initStaticMetadata
8998 *
8999 * DESCRIPTION: initialize the static metadata
9000 *
9001 * PARAMETERS :
9002 * @cameraId : camera Id
9003 *
9004 * RETURN : int32_t type of status
9005 * 0 -- success
9006 * non-zero failure code
9007 *==========================================================================*/
9008int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9009{
9010 int rc = 0;
9011 CameraMetadata staticInfo;
9012 size_t count = 0;
9013 bool limitedDevice = false;
9014 char prop[PROPERTY_VALUE_MAX];
9015 bool supportBurst = false;
9016
9017 supportBurst = supportBurstCapture(cameraId);
9018
9019 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9020 * guaranteed or if min fps of max resolution is less than 20 fps, its
9021     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9022     * advertised as a limited device */
9023 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9024 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9025 !supportBurst;
9026
9027 uint8_t supportedHwLvl = limitedDevice ?
9028 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009029#ifndef USE_HAL_3_3
9030 // LEVEL_3 - This device will support level 3.
9031 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9032#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009033 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009034#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009035
9036 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9037 &supportedHwLvl, 1);
9038
9039 bool facingBack = false;
9040 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9041 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9042 facingBack = true;
9043 }
9044 /*HAL 3 only*/
9045 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9046 &gCamCapability[cameraId]->min_focus_distance, 1);
9047
9048 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9049 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9050
9051 /*should be using focal lengths but sensor doesn't provide that info now*/
9052 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9053 &gCamCapability[cameraId]->focal_length,
9054 1);
9055
9056 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9057 gCamCapability[cameraId]->apertures,
9058 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9059
9060 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9061 gCamCapability[cameraId]->filter_densities,
9062 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9063
9064
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009065 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9066 size_t mode_count =
9067 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9068 for (size_t i = 0; i < mode_count; i++) {
9069 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9070 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009071 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009072 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009073
9074 int32_t lens_shading_map_size[] = {
9075 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9076 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9077 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9078 lens_shading_map_size,
9079 sizeof(lens_shading_map_size)/sizeof(int32_t));
9080
9081 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9082 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9083
9084 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9085 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9086
9087 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9088 &gCamCapability[cameraId]->max_frame_duration, 1);
9089
9090 camera_metadata_rational baseGainFactor = {
9091 gCamCapability[cameraId]->base_gain_factor.numerator,
9092 gCamCapability[cameraId]->base_gain_factor.denominator};
9093 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9094 &baseGainFactor, 1);
9095
9096 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9097 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9098
9099 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9100 gCamCapability[cameraId]->pixel_array_size.height};
9101 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9102 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9103
9104 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9105 gCamCapability[cameraId]->active_array_size.top,
9106 gCamCapability[cameraId]->active_array_size.width,
9107 gCamCapability[cameraId]->active_array_size.height};
9108 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9109 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9110
9111 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9112 &gCamCapability[cameraId]->white_level, 1);
9113
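    // Adjust the black level pattern to the sensor's color filter arrangement before publishing.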
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009114 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9115 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9116 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009117 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009118 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009119
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009120#ifndef USE_HAL_3_3
9121 bool hasBlackRegions = false;
9122 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9123 LOGW("black_region_count: %d is bounded to %d",
9124 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9125 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9126 }
9127 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9128 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9129 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9130 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9131 }
9132 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9133 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9134 hasBlackRegions = true;
9135 }
9136#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009137 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9138 &gCamCapability[cameraId]->flash_charge_duration, 1);
9139
9140 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9141 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9142
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009143 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9144 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9145 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009146 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9147 &timestampSource, 1);
9148
Thierry Strudel54dc9782017-02-15 12:12:10 -08009149 //update histogram vendor data
9150 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009151 &gCamCapability[cameraId]->histogram_size, 1);
9152
Thierry Strudel54dc9782017-02-15 12:12:10 -08009153 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009154 &gCamCapability[cameraId]->max_histogram_count, 1);
9155
Shuzhen Wang14415f52016-11-16 18:26:18 -08009156 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9157    //so that the app can request fewer bins than the maximum supported.
9158 std::vector<int32_t> histBins;
9159 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9160 histBins.push_back(maxHistBins);
9161 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9162 (maxHistBins & 0x1) == 0) {
9163 histBins.push_back(maxHistBins >> 1);
9164 maxHistBins >>= 1;
9165 }
9166 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9167 histBins.data(), histBins.size());
9168
Thierry Strudel3d639192016-09-09 11:52:26 -07009169 int32_t sharpness_map_size[] = {
9170 gCamCapability[cameraId]->sharpness_map_size.width,
9171 gCamCapability[cameraId]->sharpness_map_size.height};
9172
9173 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9174 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9175
9176 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9177 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9178
Emilian Peev0f3c3162017-03-15 12:57:46 +00009179 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9180 if (0 <= indexPD) {
9181 // Advertise PD stats data as part of the Depth capabilities
9182 int32_t depthWidth =
9183 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9184 int32_t depthHeight =
9185 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
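        // Depth sample count derived from the PD stats dimensions: 2 bytes per pixel,
        // with each reported sample assumed to span 16 bytes of that buffer.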
9186 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9187 assert(0 < depthSamplesCount);
9188 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9189 &depthSamplesCount, 1);
9190
9191 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9192 depthHeight,
9193 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9194 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9195 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9196 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9197 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9198
9199 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9200 depthHeight, 33333333,
9201 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9202 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9203 depthMinDuration,
9204 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9205
9206 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9207 depthHeight, 0,
9208 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9209 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9210 depthStallDuration,
9211 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9212
9213 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9214 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9215 }
9216
Thierry Strudel3d639192016-09-09 11:52:26 -07009217 int32_t scalar_formats[] = {
9218 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9219 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9220 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9221 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9222 HAL_PIXEL_FORMAT_RAW10,
9223 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009224 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9225 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9226 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009227
9228 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9229 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9230 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9231 count, MAX_SIZES_CNT, available_processed_sizes);
9232 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9233 available_processed_sizes, count * 2);
9234
9235 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9236 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9237 makeTable(gCamCapability[cameraId]->raw_dim,
9238 count, MAX_SIZES_CNT, available_raw_sizes);
9239 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9240 available_raw_sizes, count * 2);
9241
9242 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9243 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9244 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9245 count, MAX_SIZES_CNT, available_fps_ranges);
9246 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9247 available_fps_ranges, count * 2);
9248
9249 camera_metadata_rational exposureCompensationStep = {
9250 gCamCapability[cameraId]->exp_compensation_step.numerator,
9251 gCamCapability[cameraId]->exp_compensation_step.denominator};
9252 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9253 &exposureCompensationStep, 1);
9254
9255 Vector<uint8_t> availableVstabModes;
9256 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9257 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009258 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009259 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009260 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009261 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009262 count = IS_TYPE_MAX;
9263 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9264 for (size_t i = 0; i < count; i++) {
9265 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9266 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9267 eisSupported = true;
9268 break;
9269 }
9270 }
9271 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009272 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9273 }
9274 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9275 availableVstabModes.array(), availableVstabModes.size());
9276
9277 /*HAL 1 and HAL 3 common*/
9278 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9279 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9280 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009281 // Cap the max zoom to the max preferred value
9282 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009283 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9284 &maxZoom, 1);
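    // Worked example (hypothetical zoom table): if the last zoom_ratio_tbl entry
    // is 450 (4.5x against minZoomStep = 100), the unsigned division
    // maxZoomStep/minZoomStep truncates to 4 before the MIN() against
    // MAX_PREFERRED_ZOOM_RATIO, so 4.0 is what gets advertised here.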
9285
9286 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9287 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9288
9289 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9290 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9291 max3aRegions[2] = 0; /* AF not supported */
9292 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9293 max3aRegions, 3);
9294
9295 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9296 memset(prop, 0, sizeof(prop));
9297 property_get("persist.camera.facedetect", prop, "1");
9298 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9299 LOGD("Support face detection mode: %d",
9300 supportedFaceDetectMode);
9301
9302 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009303    /* supported mode should be OFF if the max number of faces is 0 */
9304 if (maxFaces <= 0) {
9305 supportedFaceDetectMode = 0;
9306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009307 Vector<uint8_t> availableFaceDetectModes;
9308 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9309 if (supportedFaceDetectMode == 1) {
9310 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9311 } else if (supportedFaceDetectMode == 2) {
9312 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9313 } else if (supportedFaceDetectMode == 3) {
9314 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9315 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9316 } else {
9317 maxFaces = 0;
9318 }
9319 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9320 availableFaceDetectModes.array(),
9321 availableFaceDetectModes.size());
9322 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9323 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009324 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9325 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9326 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009327
9328 int32_t exposureCompensationRange[] = {
9329 gCamCapability[cameraId]->exposure_compensation_min,
9330 gCamCapability[cameraId]->exposure_compensation_max};
9331 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9332 exposureCompensationRange,
9333 sizeof(exposureCompensationRange)/sizeof(int32_t));
9334
9335 uint8_t lensFacing = (facingBack) ?
9336 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9337 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9338
9339 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9340 available_thumbnail_sizes,
9341 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9342
 9343    /*all supported sizes will be combined into this tag*/
9344 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9345 /*android.scaler.availableStreamConfigurations*/
9346 Vector<int32_t> available_stream_configs;
9347 cam_dimension_t active_array_dim;
9348 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9349 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009350
 9351    /*advertise the list of supported input dimensions based on the property below.
 9352      By default all sizes up to 5MP will be advertised.
 9353      Note that the setprop resolution format should be WxH,
 9354      e.g.: adb shell setprop persist.camera.input.minsize 1280x720
 9355      To list all supported sizes, set the property to "0x0" */
9356 cam_dimension_t minInputSize = {2592,1944}; //5MP
9357 memset(prop, 0, sizeof(prop));
9358 property_get("persist.camera.input.minsize", prop, "2592x1944");
9359 if (strlen(prop) > 0) {
9360 char *saveptr = NULL;
9361 char *token = strtok_r(prop, "x", &saveptr);
9362 if (token != NULL) {
9363 minInputSize.width = atoi(token);
9364 }
9365 token = strtok_r(NULL, "x", &saveptr);
9366 if (token != NULL) {
9367 minInputSize.height = atoi(token);
9368 }
9369 }
9370
Thierry Strudel3d639192016-09-09 11:52:26 -07009371 /* Add input/output stream configurations for each scalar formats*/
9372 for (size_t j = 0; j < scalar_formats_count; j++) {
9373 switch (scalar_formats[j]) {
9374 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9375 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9376 case HAL_PIXEL_FORMAT_RAW10:
9377 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9378 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9379 addStreamConfig(available_stream_configs, scalar_formats[j],
9380 gCamCapability[cameraId]->raw_dim[i],
9381 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9382 }
9383 break;
9384 case HAL_PIXEL_FORMAT_BLOB:
9385 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9386 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9387 addStreamConfig(available_stream_configs, scalar_formats[j],
9388 gCamCapability[cameraId]->picture_sizes_tbl[i],
9389 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9390 }
9391 break;
9392 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9393 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9394 default:
9395 cam_dimension_t largest_picture_size;
9396 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9397 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9398 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9399 addStreamConfig(available_stream_configs, scalar_formats[j],
9400 gCamCapability[cameraId]->picture_sizes_tbl[i],
9401 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009402            /* For the two formats below we also support input streams for reprocessing; advertise those as well */
9403 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9404 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9405 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9406 >= minInputSize.width) || (gCamCapability[cameraId]->
9407 picture_sizes_tbl[i].height >= minInputSize.height)) {
9408 addStreamConfig(available_stream_configs, scalar_formats[j],
9409 gCamCapability[cameraId]->picture_sizes_tbl[i],
9410 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9411 }
9412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009414
Thierry Strudel3d639192016-09-09 11:52:26 -07009415 break;
9416 }
9417 }
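    // Each addStreamConfig() call above appends one (format, width, height,
    // direction) entry, matching the 4-int32 layout required by
    // ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (the same shape as the
    // depthConfigs table earlier). An illustrative BLOB output entry would be
    // {HAL_PIXEL_FORMAT_BLOB, 4032, 3024,
    //  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}.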
9418
9419 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9420 available_stream_configs.array(), available_stream_configs.size());
9421 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9422 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9423
9424 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9425 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9426
9427 /* android.scaler.availableMinFrameDurations */
9428 Vector<int64_t> available_min_durations;
9429 for (size_t j = 0; j < scalar_formats_count; j++) {
9430 switch (scalar_formats[j]) {
9431 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9432 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9433 case HAL_PIXEL_FORMAT_RAW10:
9434 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9435 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9436 available_min_durations.add(scalar_formats[j]);
9437 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9438 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9439 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9440 }
9441 break;
9442 default:
9443 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9444 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9445 available_min_durations.add(scalar_formats[j]);
9446 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9447 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9448 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9449 }
9450 break;
9451 }
9452 }
9453 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9454 available_min_durations.array(), available_min_durations.size());
9455
9456 Vector<int32_t> available_hfr_configs;
9457 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9458 int32_t fps = 0;
9459 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9460 case CAM_HFR_MODE_60FPS:
9461 fps = 60;
9462 break;
9463 case CAM_HFR_MODE_90FPS:
9464 fps = 90;
9465 break;
9466 case CAM_HFR_MODE_120FPS:
9467 fps = 120;
9468 break;
9469 case CAM_HFR_MODE_150FPS:
9470 fps = 150;
9471 break;
9472 case CAM_HFR_MODE_180FPS:
9473 fps = 180;
9474 break;
9475 case CAM_HFR_MODE_210FPS:
9476 fps = 210;
9477 break;
9478 case CAM_HFR_MODE_240FPS:
9479 fps = 240;
9480 break;
9481 case CAM_HFR_MODE_480FPS:
9482 fps = 480;
9483 break;
9484 case CAM_HFR_MODE_OFF:
9485 case CAM_HFR_MODE_MAX:
9486 default:
9487 break;
9488 }
9489
9490 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9491 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
 9492            /* For each HFR frame rate, we need to advertise one variable fps range
 9493             * and one fixed fps range per dimension. E.g., for 120 FPS, advertise [30, 120]
9494 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9495 * set by the app. When video recording is started, [120, 120] is
9496 * set. This way sensor configuration does not change when recording
9497 * is started */
9498
9499 /* (width, height, fps_min, fps_max, batch_size_max) */
9500 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9501 j < MAX_SIZES_CNT; j++) {
9502 available_hfr_configs.add(
9503 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9504 available_hfr_configs.add(
9505 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9506 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9507 available_hfr_configs.add(fps);
9508 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9509
9510 /* (width, height, fps_min, fps_max, batch_size_max) */
9511 available_hfr_configs.add(
9512 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9513 available_hfr_configs.add(
9514 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9515 available_hfr_configs.add(fps);
9516 available_hfr_configs.add(fps);
9517 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9518 }
9519 }
9520 }
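    // Worked example (hypothetical 1920x1080 hfr_tbl entry at 120 fps, assuming
    // PREVIEW_FPS_FOR_HFR is the 30 fps preview rate referenced in the comment
    // above): the loop appends (1920, 1080, 30, 120, 4) for the variable range
    // and (1920, 1080, 120, 120, 4) for the fixed range, where
    // 4 = fps / PREVIEW_FPS_FOR_HFR is the max batch size.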
9521 //Advertise HFR capability only if the property is set
9522 memset(prop, 0, sizeof(prop));
9523 property_get("persist.camera.hal3hfr.enable", prop, "1");
9524 uint8_t hfrEnable = (uint8_t)atoi(prop);
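    // Like the other persist.camera.* switches in this function, HFR
    // advertisement can be toggled at runtime for bring-up and debugging, e.g.:
    //   adb shell setprop persist.camera.hal3hfr.enable 0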
9525
9526 if(hfrEnable && available_hfr_configs.array()) {
9527 staticInfo.update(
9528 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9529 available_hfr_configs.array(), available_hfr_configs.size());
9530 }
9531
9532 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9533 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9534 &max_jpeg_size, 1);
9535
9536 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9537 size_t size = 0;
9538 count = CAM_EFFECT_MODE_MAX;
9539 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9540 for (size_t i = 0; i < count; i++) {
9541 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9542 gCamCapability[cameraId]->supported_effects[i]);
9543 if (NAME_NOT_FOUND != val) {
9544 avail_effects[size] = (uint8_t)val;
9545 size++;
9546 }
9547 }
9548 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9549 avail_effects,
9550 size);
9551
9552 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9553 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9554 size_t supported_scene_modes_cnt = 0;
9555 count = CAM_SCENE_MODE_MAX;
9556 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9557 for (size_t i = 0; i < count; i++) {
9558 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9559 CAM_SCENE_MODE_OFF) {
9560 int val = lookupFwkName(SCENE_MODES_MAP,
9561 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9562 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009563
Thierry Strudel3d639192016-09-09 11:52:26 -07009564 if (NAME_NOT_FOUND != val) {
9565 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9566 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9567 supported_scene_modes_cnt++;
9568 }
9569 }
9570 }
9571 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9572 avail_scene_modes,
9573 supported_scene_modes_cnt);
9574
9575 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9576 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9577 supported_scene_modes_cnt,
9578 CAM_SCENE_MODE_MAX,
9579 scene_mode_overrides,
9580 supported_indexes,
9581 cameraId);
9582
9583 if (supported_scene_modes_cnt == 0) {
9584 supported_scene_modes_cnt = 1;
9585 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9586 }
9587
9588 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9589 scene_mode_overrides, supported_scene_modes_cnt * 3);
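    // scene_mode_overrides is consumed as consecutive (ae_mode, awb_mode, af_mode)
    // triples, one per advertised scene mode; see makeOverridesList() further down,
    // which fills overridesList[j], overridesList[j+1] and overridesList[j+2] in
    // exactly that order.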
9590
9591 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9592 ANDROID_CONTROL_MODE_AUTO,
9593 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9594 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9595 available_control_modes,
9596 3);
9597
9598 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9599 size = 0;
9600 count = CAM_ANTIBANDING_MODE_MAX;
9601 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9602 for (size_t i = 0; i < count; i++) {
9603 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9604 gCamCapability[cameraId]->supported_antibandings[i]);
9605 if (NAME_NOT_FOUND != val) {
9606 avail_antibanding_modes[size] = (uint8_t)val;
9607 size++;
9608 }
9609
9610 }
9611 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9612 avail_antibanding_modes,
9613 size);
9614
9615 uint8_t avail_abberation_modes[] = {
9616 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9617 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9618 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9619 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9620 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9621 if (0 == count) {
 9622        // If no aberration correction modes are available for a device, advertise only the OFF mode
9623 size = 1;
9624 } else {
 9625        // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
 9626        // So, advertise all 3 modes if at least any one mode is supported, as per the
 9627        // new M requirement
9628 size = 3;
9629 }
9630 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9631 avail_abberation_modes,
9632 size);
9633
9634 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9635 size = 0;
9636 count = CAM_FOCUS_MODE_MAX;
9637 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9638 for (size_t i = 0; i < count; i++) {
9639 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9640 gCamCapability[cameraId]->supported_focus_modes[i]);
9641 if (NAME_NOT_FOUND != val) {
9642 avail_af_modes[size] = (uint8_t)val;
9643 size++;
9644 }
9645 }
9646 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9647 avail_af_modes,
9648 size);
9649
9650 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9651 size = 0;
9652 count = CAM_WB_MODE_MAX;
9653 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9654 for (size_t i = 0; i < count; i++) {
9655 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9656 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9657 gCamCapability[cameraId]->supported_white_balances[i]);
9658 if (NAME_NOT_FOUND != val) {
9659 avail_awb_modes[size] = (uint8_t)val;
9660 size++;
9661 }
9662 }
9663 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9664 avail_awb_modes,
9665 size);
9666
9667 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9668 count = CAM_FLASH_FIRING_LEVEL_MAX;
9669 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9670 count);
9671 for (size_t i = 0; i < count; i++) {
9672 available_flash_levels[i] =
9673 gCamCapability[cameraId]->supported_firing_levels[i];
9674 }
9675 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9676 available_flash_levels, count);
9677
9678 uint8_t flashAvailable;
9679 if (gCamCapability[cameraId]->flash_available)
9680 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9681 else
9682 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9683 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9684 &flashAvailable, 1);
9685
9686 Vector<uint8_t> avail_ae_modes;
9687 count = CAM_AE_MODE_MAX;
9688 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9689 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009690 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9691 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9692 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9693 }
9694 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009695 }
9696 if (flashAvailable) {
9697 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9698 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9699 }
9700 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9701 avail_ae_modes.array(),
9702 avail_ae_modes.size());
9703
9704 int32_t sensitivity_range[2];
9705 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9706 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9707 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9708 sensitivity_range,
9709 sizeof(sensitivity_range) / sizeof(int32_t));
9710
9711 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9712 &gCamCapability[cameraId]->max_analog_sensitivity,
9713 1);
9714
9715 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9716 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9717 &sensor_orientation,
9718 1);
9719
9720 int32_t max_output_streams[] = {
9721 MAX_STALLING_STREAMS,
9722 MAX_PROCESSED_STREAMS,
9723 MAX_RAW_STREAMS};
9724 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9725 max_output_streams,
9726 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9727
9728 uint8_t avail_leds = 0;
9729 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9730 &avail_leds, 0);
9731
9732 uint8_t focus_dist_calibrated;
9733 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9734 gCamCapability[cameraId]->focus_dist_calibrated);
9735 if (NAME_NOT_FOUND != val) {
9736 focus_dist_calibrated = (uint8_t)val;
9737 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9738 &focus_dist_calibrated, 1);
9739 }
9740
9741 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9742 size = 0;
9743 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9744 MAX_TEST_PATTERN_CNT);
9745 for (size_t i = 0; i < count; i++) {
9746 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9747 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9748 if (NAME_NOT_FOUND != testpatternMode) {
9749 avail_testpattern_modes[size] = testpatternMode;
9750 size++;
9751 }
9752 }
9753 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9754 avail_testpattern_modes,
9755 size);
9756
9757 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9758 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9759 &max_pipeline_depth,
9760 1);
9761
9762 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9763 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9764 &partial_result_count,
9765 1);
9766
9767 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9768 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9769
9770 Vector<uint8_t> available_capabilities;
9771 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9772 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9773 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9774 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9775 if (supportBurst) {
9776 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9777 }
9778 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9779 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9780 if (hfrEnable && available_hfr_configs.array()) {
9781 available_capabilities.add(
9782 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9783 }
9784
9785 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9786 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9787 }
9788 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9789 available_capabilities.array(),
9790 available_capabilities.size());
9791
 9792    //aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
 9793    //Assumption is that all bayer cameras support MANUAL_SENSOR.
9794 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9795 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9796
9797 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9798 &aeLockAvailable, 1);
9799
 9800    //awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
 9801    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9802 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9803 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9804
9805 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9806 &awbLockAvailable, 1);
9807
9808 int32_t max_input_streams = 1;
9809 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9810 &max_input_streams,
9811 1);
9812
9813 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9814 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9815 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9816 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9817 HAL_PIXEL_FORMAT_YCbCr_420_888};
9818 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9819 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
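    // Read as: an IMPLEMENTATION_DEFINED input can be reprocessed into BLOB or
    // YCbCr_420_888, and a YCbCr_420_888 input can likewise be reprocessed into
    // BLOB or YCbCr_420_888, following the
    // (input format, num_output_formats, output formats...) layout noted above.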
9820
9821 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9822 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9823 &max_latency,
9824 1);
9825
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009826#ifndef USE_HAL_3_3
9827 int32_t isp_sensitivity_range[2];
9828 isp_sensitivity_range[0] =
9829 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9830 isp_sensitivity_range[1] =
9831 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9832 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9833 isp_sensitivity_range,
9834 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9835#endif
9836
Thierry Strudel3d639192016-09-09 11:52:26 -07009837 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9838 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9839 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9840 available_hot_pixel_modes,
9841 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9842
9843 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9844 ANDROID_SHADING_MODE_FAST,
9845 ANDROID_SHADING_MODE_HIGH_QUALITY};
9846 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9847 available_shading_modes,
9848 3);
9849
9850 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9851 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9852 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9853 available_lens_shading_map_modes,
9854 2);
9855
9856 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9857 ANDROID_EDGE_MODE_FAST,
9858 ANDROID_EDGE_MODE_HIGH_QUALITY,
9859 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9860 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9861 available_edge_modes,
9862 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9863
9864 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9865 ANDROID_NOISE_REDUCTION_MODE_FAST,
9866 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9867 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9868 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9869 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9870 available_noise_red_modes,
9871 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9872
9873 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9874 ANDROID_TONEMAP_MODE_FAST,
9875 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9876 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9877 available_tonemap_modes,
9878 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9879
9880 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9881 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9882 available_hot_pixel_map_modes,
9883 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9884
9885 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9886 gCamCapability[cameraId]->reference_illuminant1);
9887 if (NAME_NOT_FOUND != val) {
9888 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9889 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9890 }
9891
9892 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9893 gCamCapability[cameraId]->reference_illuminant2);
9894 if (NAME_NOT_FOUND != val) {
9895 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9896 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9897 }
9898
9899 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9900 (void *)gCamCapability[cameraId]->forward_matrix1,
9901 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9902
9903 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9904 (void *)gCamCapability[cameraId]->forward_matrix2,
9905 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9906
9907 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9908 (void *)gCamCapability[cameraId]->color_transform1,
9909 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9910
9911 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9912 (void *)gCamCapability[cameraId]->color_transform2,
9913 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9914
9915 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9916 (void *)gCamCapability[cameraId]->calibration_transform1,
9917 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9918
9919 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9920 (void *)gCamCapability[cameraId]->calibration_transform2,
9921 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9922
9923 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9924 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9925 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9926 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9927 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9928 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9929 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9930 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9931 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9932 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9933 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9934 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9935 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9936 ANDROID_JPEG_GPS_COORDINATES,
9937 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9938 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9939 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9940 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9941 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9942 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9943 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9944 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9945 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9946 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009947#ifndef USE_HAL_3_3
9948 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9949#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009950 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009951 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009952 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9953 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009954 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009955 /* DevCamDebug metadata request_keys_basic */
9956 DEVCAMDEBUG_META_ENABLE,
9957 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009958 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009959 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009960 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009961 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009962 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009963
9964 size_t request_keys_cnt =
9965 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9966 Vector<int32_t> available_request_keys;
9967 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9968 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9969 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9970 }
9971
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009972 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00009973 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009974 }
9975
Thierry Strudel3d639192016-09-09 11:52:26 -07009976 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9977 available_request_keys.array(), available_request_keys.size());
9978
9979 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9980 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9981 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9982 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9983 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9984 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9985 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9986 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9987 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9988 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9989 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9990 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9991 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9992 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9993 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9994 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9995 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009996 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009997 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9998 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9999 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010000 ANDROID_STATISTICS_FACE_SCORES,
10001#ifndef USE_HAL_3_3
10002 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10003#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010004 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010005 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010006 // DevCamDebug metadata result_keys_basic
10007 DEVCAMDEBUG_META_ENABLE,
10008 // DevCamDebug metadata result_keys AF
10009 DEVCAMDEBUG_AF_LENS_POSITION,
10010 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10011 DEVCAMDEBUG_AF_TOF_DISTANCE,
10012 DEVCAMDEBUG_AF_LUMA,
10013 DEVCAMDEBUG_AF_HAF_STATE,
10014 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10015 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10016 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10017 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10018 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10019 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10020 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10021 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10022 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10023 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10024 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10025 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10026 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10027 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10028 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10029 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10030 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10031 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10032 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10033 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10034 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10035 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10036 // DevCamDebug metadata result_keys AEC
10037 DEVCAMDEBUG_AEC_TARGET_LUMA,
10038 DEVCAMDEBUG_AEC_COMP_LUMA,
10039 DEVCAMDEBUG_AEC_AVG_LUMA,
10040 DEVCAMDEBUG_AEC_CUR_LUMA,
10041 DEVCAMDEBUG_AEC_LINECOUNT,
10042 DEVCAMDEBUG_AEC_REAL_GAIN,
10043 DEVCAMDEBUG_AEC_EXP_INDEX,
10044 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010045 // DevCamDebug metadata result_keys zzHDR
10046 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10047 DEVCAMDEBUG_AEC_L_LINECOUNT,
10048 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10049 DEVCAMDEBUG_AEC_S_LINECOUNT,
10050 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10051 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10052 // DevCamDebug metadata result_keys ADRC
10053 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10054 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10055 DEVCAMDEBUG_AEC_GTM_RATIO,
10056 DEVCAMDEBUG_AEC_LTM_RATIO,
10057 DEVCAMDEBUG_AEC_LA_RATIO,
10058 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010059 // DevCamDebug metadata result_keys AWB
10060 DEVCAMDEBUG_AWB_R_GAIN,
10061 DEVCAMDEBUG_AWB_G_GAIN,
10062 DEVCAMDEBUG_AWB_B_GAIN,
10063 DEVCAMDEBUG_AWB_CCT,
10064 DEVCAMDEBUG_AWB_DECISION,
10065 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010066 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10067 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10068 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010069 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010070 };
10071
Thierry Strudel3d639192016-09-09 11:52:26 -070010072 size_t result_keys_cnt =
10073 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10074
10075 Vector<int32_t> available_result_keys;
10076 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10077 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10078 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10079 }
10080 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10081 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10082 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10083 }
10084 if (supportedFaceDetectMode == 1) {
10085 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10086 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10087 } else if ((supportedFaceDetectMode == 2) ||
10088 (supportedFaceDetectMode == 3)) {
10089 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10090 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10091 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010092#ifndef USE_HAL_3_3
10093 if (hasBlackRegions) {
10094 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10095 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10096 }
10097#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010098
10099 if (gExposeEnableZslKey) {
10100 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10101 }
10102
Thierry Strudel3d639192016-09-09 11:52:26 -070010103 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10104 available_result_keys.array(), available_result_keys.size());
10105
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010106 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010107 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10108 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10109 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10110 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10111 ANDROID_SCALER_CROPPING_TYPE,
10112 ANDROID_SYNC_MAX_LATENCY,
10113 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10114 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10115 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10116 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10117 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10118 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10119 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10120 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10121 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10122 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10123 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10124 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10125 ANDROID_LENS_FACING,
10126 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10127 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10128 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10129 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10130 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10131 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10132 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10133 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10134 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10135 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10136 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10137 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10138 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10139 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10140 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10141 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10142 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10143 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10144 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10145 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010146 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010147 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10148 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10149 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10150 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10151 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10152 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10153 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10154 ANDROID_CONTROL_AVAILABLE_MODES,
10155 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10156 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10157 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10158 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010159 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10160#ifndef USE_HAL_3_3
10161 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10162 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10163#endif
10164 };
10165
10166 Vector<int32_t> available_characteristics_keys;
10167 available_characteristics_keys.appendArray(characteristics_keys_basic,
10168 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10169#ifndef USE_HAL_3_3
10170 if (hasBlackRegions) {
10171 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10172 }
10173#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010174
10175 if (0 <= indexPD) {
10176 int32_t depthKeys[] = {
10177 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10178 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10179 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10180 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10181 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10182 };
10183 available_characteristics_keys.appendArray(depthKeys,
10184 sizeof(depthKeys) / sizeof(depthKeys[0]));
10185 }
10186
Thierry Strudel3d639192016-09-09 11:52:26 -070010187 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010188 available_characteristics_keys.array(),
10189 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010190
10191 /*available stall durations depend on the hw + sw and will be different for different devices */
10192 /*have to add for raw after implementation*/
10193 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10194 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10195
10196 Vector<int64_t> available_stall_durations;
10197 for (uint32_t j = 0; j < stall_formats_count; j++) {
10198 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10199 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10200 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10201 available_stall_durations.add(stall_formats[j]);
10202 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10203 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10204 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10205 }
10206 } else {
10207 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10208 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10209 available_stall_durations.add(stall_formats[j]);
10210 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10211 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10212 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10213 }
10214 }
10215 }
10216 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10217 available_stall_durations.array(),
10218 available_stall_durations.size());
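    // Each entry above is a (format, width, height, stall_duration_ns) 4-tuple.
    // An illustrative BLOB entry could be {HAL_PIXEL_FORMAT_BLOB, 4032, 3024,
    // 300000000} for a roughly 0.3 s JPEG stall; the real values come from
    // jpeg_stall_durations[] and raw16_stall_durations[].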
10219
10220 //QCAMERA3_OPAQUE_RAW
10221 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10222 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10223 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10224 case LEGACY_RAW:
10225 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10226 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10227 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10228 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10229 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10230 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10231 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10232 break;
10233 case MIPI_RAW:
10234 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10235 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10236 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10237 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10238 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10239 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10240 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10241 break;
10242 default:
10243 LOGE("unknown opaque_raw_format %d",
10244 gCamCapability[cameraId]->opaque_raw_fmt);
10245 break;
10246 }
10247 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10248
10249 Vector<int32_t> strides;
10250 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10251 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10252 cam_stream_buf_plane_info_t buf_planes;
10253 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10254 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10255 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10256 &gCamCapability[cameraId]->padding_info, &buf_planes);
10257 strides.add(buf_planes.plane_info.mp[0].stride);
10258 }
10259 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10260 strides.size());
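    // QCAMERA3_OPAQUE_RAW_STRIDES is therefore a flat list of (width, height,
    // stride) triples, one per supported raw dimension, with the stride taken
    // from the plane layout that mm_stream_calc_offset_raw() computes for the
    // opaque raw format selected above.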
10261
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010262 //TBD: remove the following line once backend advertises zzHDR in feature mask
10263 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010264 //Video HDR default
10265 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10266 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010267 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010268 int32_t vhdr_mode[] = {
10269 QCAMERA3_VIDEO_HDR_MODE_OFF,
10270 QCAMERA3_VIDEO_HDR_MODE_ON};
10271
10272 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10273 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10274 vhdr_mode, vhdr_mode_count);
10275 }
10276
Thierry Strudel3d639192016-09-09 11:52:26 -070010277 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10278 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10279 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10280
10281 uint8_t isMonoOnly =
10282 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10283 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10284 &isMonoOnly, 1);
10285
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010286#ifndef USE_HAL_3_3
10287 Vector<int32_t> opaque_size;
10288 for (size_t j = 0; j < scalar_formats_count; j++) {
10289 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10290 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10291 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10292 cam_stream_buf_plane_info_t buf_planes;
10293
10294 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10295 &gCamCapability[cameraId]->padding_info, &buf_planes);
10296
10297 if (rc == 0) {
10298 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10299 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10300 opaque_size.add(buf_planes.plane_info.frame_len);
 10301                } else {
10302 LOGE("raw frame calculation failed!");
10303 }
10304 }
10305 }
10306 }
10307
10308 if ((opaque_size.size() > 0) &&
10309 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10310 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10311 else
10312 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10313#endif
10314
Thierry Strudel04e026f2016-10-10 11:27:36 -070010315 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10316 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10317 size = 0;
10318 count = CAM_IR_MODE_MAX;
10319 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10320 for (size_t i = 0; i < count; i++) {
10321 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10322 gCamCapability[cameraId]->supported_ir_modes[i]);
10323 if (NAME_NOT_FOUND != val) {
10324 avail_ir_modes[size] = (int32_t)val;
10325 size++;
10326 }
10327 }
10328 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10329 avail_ir_modes, size);
10330 }
10331
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010332 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10333 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10334 size = 0;
10335 count = CAM_AEC_CONVERGENCE_MAX;
10336 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10337 for (size_t i = 0; i < count; i++) {
10338 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10339 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10340 if (NAME_NOT_FOUND != val) {
10341 available_instant_aec_modes[size] = (int32_t)val;
10342 size++;
10343 }
10344 }
10345 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10346 available_instant_aec_modes, size);
10347 }
10348
Thierry Strudel54dc9782017-02-15 12:12:10 -080010349 int32_t sharpness_range[] = {
10350 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10351 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10352 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10353
10354 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10355 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10356 size = 0;
10357 count = CAM_BINNING_CORRECTION_MODE_MAX;
10358 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10359 for (size_t i = 0; i < count; i++) {
10360 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10361 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10362 gCamCapability[cameraId]->supported_binning_modes[i]);
10363 if (NAME_NOT_FOUND != val) {
10364 avail_binning_modes[size] = (int32_t)val;
10365 size++;
10366 }
10367 }
10368 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10369 avail_binning_modes, size);
10370 }
10371
10372 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10373 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10374 size = 0;
10375 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10376 for (size_t i = 0; i < count; i++) {
10377 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10378 gCamCapability[cameraId]->supported_aec_modes[i]);
10379 if (NAME_NOT_FOUND != val)
10380 available_aec_modes[size++] = val;
10381 }
10382 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10383 available_aec_modes, size);
10384 }
10385
10386 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10387 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10388 size = 0;
10389 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10390 for (size_t i = 0; i < count; i++) {
10391 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10392 gCamCapability[cameraId]->supported_iso_modes[i]);
10393 if (NAME_NOT_FOUND != val)
10394 available_iso_modes[size++] = val;
10395 }
10396 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10397 available_iso_modes, size);
10398 }
10399
10400 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010401 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010402 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10403 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10404 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10405
10406 int32_t available_saturation_range[4];
10407 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10408 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10409 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10410 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10411 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10412 available_saturation_range, 4);
10413
10414 uint8_t is_hdr_values[2];
10415 is_hdr_values[0] = 0;
10416 is_hdr_values[1] = 1;
10417 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10418 is_hdr_values, 2);
10419
10420 float is_hdr_confidence_range[2];
10421 is_hdr_confidence_range[0] = 0.0;
10422 is_hdr_confidence_range[1] = 1.0;
10423 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10424 is_hdr_confidence_range, 2);
10425
Emilian Peev0a972ef2017-03-16 10:25:53 +000010426 size_t eepromLength = strnlen(
10427 reinterpret_cast<const char *>(
10428 gCamCapability[cameraId]->eeprom_version_info),
10429 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10430 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010431 char easelInfo[] = ",E:N";
10432 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10433 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10434 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010435 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10436 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010437 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010438 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10439 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10440 }
10441
Thierry Strudel3d639192016-09-09 11:52:26 -070010442 gStaticMetadata[cameraId] = staticInfo.release();
10443 return rc;
10444}
10445
10446/*===========================================================================
10447 * FUNCTION : makeTable
10448 *
10449 * DESCRIPTION: make a table of sizes
10450 *
 10451 * PARAMETERS :
 10452 *   @dimTable  : input table of dimensions
 10453 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of entries to copy from dimTable
 *   @sizeTable : output array filled with interleaved width/height pairs
 *
10454 *==========================================================================*/
10455void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10456 size_t max_size, int32_t *sizeTable)
10457{
10458 size_t j = 0;
10459 if (size > max_size) {
10460 size = max_size;
10461 }
10462 for (size_t i = 0; i < size; i++) {
10463 sizeTable[j] = dimTable[i].width;
10464 sizeTable[j+1] = dimTable[i].height;
10465 j+=2;
10466 }
10467}
10468
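/* Illustrative sketch (not part of the HAL build; the values are made up):
 * makeTable() flattens a dimension table into the interleaved
 * [w0, h0, w1, h1, ...] layout that framework size lists expect, and
 * makeFPSTable() below follows the same pattern for (min_fps, max_fps) pairs.
 *
 *   cam_dimension_t dims[2] = {{4032, 3024}, {1920, 1080}};
 *   int32_t sizes[4];
 *   makeTable(dims, 2, 2, sizes);
 *   // sizes now holds {4032, 3024, 1920, 1080}
 */
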
10469/*===========================================================================
10470 * FUNCTION : makeFPSTable
10471 *
10472 * DESCRIPTION: make a table of fps ranges
10473 *
10474 * PARAMETERS :
10475 *
10476 *==========================================================================*/
10477void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10478 size_t max_size, int32_t *fpsRangesTable)
10479{
10480 size_t j = 0;
10481 if (size > max_size) {
10482 size = max_size;
10483 }
10484 for (size_t i = 0; i < size; i++) {
10485 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10486 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10487 j+=2;
10488 }
10489}
10490
10491/*===========================================================================
10492 * FUNCTION : makeOverridesList
10493 *
10494 * DESCRIPTION: make a list of scene mode overrides
10495 *
10496 * PARAMETERS :
10497 *
10498 *
10499 *==========================================================================*/
10500void QCamera3HardwareInterface::makeOverridesList(
10501 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10502 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10503{
10504 /* The daemon gives a list of overrides for all scene modes.
10505 However, we should send the framework only the overrides for the scene
10506 modes that it supports */
10507 size_t j = 0;
10508 if (size > max_size) {
10509 size = max_size;
10510 }
10511 size_t focus_count = CAM_FOCUS_MODE_MAX;
10512 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10513 focus_count);
10514 for (size_t i = 0; i < size; i++) {
10515 bool supt = false;
10516 size_t index = supported_indexes[i];
10517 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10518 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10519 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10520 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10521 overridesTable[index].awb_mode);
10522 if (NAME_NOT_FOUND != val) {
10523 overridesList[j+1] = (uint8_t)val;
10524 }
10525 uint8_t focus_override = overridesTable[index].af_mode;
10526 for (size_t k = 0; k < focus_count; k++) {
10527 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10528 supt = true;
10529 break;
10530 }
10531 }
10532 if (supt) {
10533 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10534 focus_override);
10535 if (NAME_NOT_FOUND != val) {
10536 overridesList[j+2] = (uint8_t)val;
10537 }
10538 } else {
10539 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10540 }
10541 j+=3;
10542 }
10543}
10544
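/* Illustrative sketch (an assumption, not part of the HAL build): the list built
 * above holds one (AE, AWB, AF) triplet per supported scene mode, which is the
 * layout ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects. A hypothetical helper to
 * read triplet i back out could look like:
 *
 *   struct SceneOverride { uint8_t aeMode, awbMode, afMode; };
 *   static inline SceneOverride getOverride(const uint8_t *overridesList, size_t i) {
 *       return { overridesList[3 * i], overridesList[3 * i + 1], overridesList[3 * i + 2] };
 *   }
 */
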
10545/*===========================================================================
10546 * FUNCTION : filterJpegSizes
10547 *
10548 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes
10549 * that are no smaller than the active array divided by the max downscale factor
10550 *
10551 * PARAMETERS :
10552 *
10553 * RETURN : length of jpegSizes array
10554 *==========================================================================*/
10555
10556size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10557 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10558 uint8_t downscale_factor)
10559{
10560 if (0 == downscale_factor) {
10561 downscale_factor = 1;
10562 }
10563
10564 int32_t min_width = active_array_size.width / downscale_factor;
10565 int32_t min_height = active_array_size.height / downscale_factor;
10566 size_t jpegSizesCnt = 0;
10567 if (processedSizesCnt > maxCount) {
10568 processedSizesCnt = maxCount;
10569 }
10570 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10571 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10572 jpegSizes[jpegSizesCnt] = processedSizes[i];
10573 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10574 jpegSizesCnt += 2;
10575 }
10576 }
10577 return jpegSizesCnt;
10578}
10579
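/* Worked example (illustrative numbers only): with a 4032x3024 active array and
 * downscale_factor = 2, min_width/min_height become 2016/1512, so a processed
 * size list of {4032,3024, 1920,1080, 2592,1944} is filtered down to
 * {4032,3024, 2592,1944} and filterJpegSizes() returns 4.
 */
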
10580/*===========================================================================
10581 * FUNCTION : computeNoiseModelEntryS
10582 *
10583 * DESCRIPTION: function to map a given sensitivity to the S noise
10584 * model parameters in the DNG noise model.
10585 *
10586 * PARAMETERS : sens : the sensor sensitivity
10587 *
10588 * RETURN : S (sensor amplification) noise
10589 *
10590 *==========================================================================*/
10591double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10592 double s = gCamCapability[mCameraId]->gradient_S * sens +
10593 gCamCapability[mCameraId]->offset_S;
10594 return ((s < 0.0) ? 0.0 : s);
10595}
10596
10597/*===========================================================================
10598 * FUNCTION : computeNoiseModelEntryO
10599 *
10600 * DESCRIPTION: function to map a given sensitivity to the O noise
10601 * model parameters in the DNG noise model.
10602 *
10603 * PARAMETERS : sens : the sensor sensitivity
10604 *
10605 * RETURN : O (sensor readout) noise
10606 *
10607 *==========================================================================*/
10608double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10609 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10610 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10611 1.0 : (1.0 * sens / max_analog_sens);
10612 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10613 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10614 return ((o < 0.0) ? 0.0 : o);
10615}
10616
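/* Background sketch (an assumption based on the DNG / ANDROID_SENSOR_NOISE_PROFILE
 * convention, not something this file states): the S and O entries computed above
 * parameterize a linear noise model where the noise variance at a normalized
 * signal level x is approximately
 *
 *     variance(x) = S * x + O
 *
 * so for a given capture sensitivity the per-channel profile could be filled in
 * roughly like this (illustrative only; `sensitivity` is a placeholder variable):
 *
 *   double noise_profile[2 * 4];   // (S, O) pair per CFA channel
 *   for (int ch = 0; ch < 4; ch++) {
 *       noise_profile[2 * ch]     = computeNoiseModelEntryS(sensitivity);
 *       noise_profile[2 * ch + 1] = computeNoiseModelEntryO(sensitivity);
 *   }
 */
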
10617/*===========================================================================
10618 * FUNCTION : getSensorSensitivity
10619 *
10620 * DESCRIPTION: convert iso_mode to an integer value
10621 *
10622 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10623 *
10624 * RETURN : sensitivity supported by sensor
10625 *
10626 *==========================================================================*/
10627int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10628{
10629 int32_t sensitivity;
10630
10631 switch (iso_mode) {
10632 case CAM_ISO_MODE_100:
10633 sensitivity = 100;
10634 break;
10635 case CAM_ISO_MODE_200:
10636 sensitivity = 200;
10637 break;
10638 case CAM_ISO_MODE_400:
10639 sensitivity = 400;
10640 break;
10641 case CAM_ISO_MODE_800:
10642 sensitivity = 800;
10643 break;
10644 case CAM_ISO_MODE_1600:
10645 sensitivity = 1600;
10646 break;
10647 default:
10648 sensitivity = -1;
10649 break;
10650 }
10651 return sensitivity;
10652}
10653
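/* Quick reference (summarized from the function below): when Easel is present,
 * initHdrPlusClientLocked() consults these system properties:
 *   camera.hdrplus.donotpoweroneasel - if true, leave Easel powered off so
 *                                      HDR+ tests can connect to it directly
 *   persist.camera.hdrplus.enable    - if false, Easel runs in bypass-only mode
 *   persist.camera.hdrplus.profiling - enables HDR+ profiling
 * The ANDROID_CONTROL_ENABLE_ZSL key is exposed only when HDR+ (non-bypass)
 * mode is enabled.
 */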
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010654int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010655 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010656 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10657 // to connect to Easel.
10658 bool doNotpowerOnEasel =
10659 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10660
10661 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010662 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10663 return OK;
10664 }
10665
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010666 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010667 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010668 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010669 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010670 return res;
10671 }
10672
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010673 EaselManagerClientOpened = true;
10674
10675 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010676 if (res != OK) {
10677 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10678 }
10679
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010680 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010681 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010682
10683 // Expose enableZsl key only when HDR+ mode is enabled.
10684 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010685 }
10686
10687 return OK;
10688}
10689
Thierry Strudel3d639192016-09-09 11:52:26 -070010690/*===========================================================================
10691 * FUNCTION : getCamInfo
10692 *
10693 * DESCRIPTION: query camera capabilities
10694 *
10695 * PARAMETERS :
10696 * @cameraId : camera Id
10697 * @info : camera info struct to be filled in with camera capabilities
10698 *
10699 * RETURN : int type of status
10700 * NO_ERROR -- success
10701 * non-zero failure code
10702 *==========================================================================*/
10703int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10704 struct camera_info *info)
10705{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010706 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010707 int rc = 0;
10708
10709 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010710
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010711 {
10712 Mutex::Autolock l(gHdrPlusClientLock);
10713 rc = initHdrPlusClientLocked();
10714 if (rc != OK) {
10715 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10716 pthread_mutex_unlock(&gCamLock);
10717 return rc;
10718 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010719 }
10720
Thierry Strudel3d639192016-09-09 11:52:26 -070010721 if (NULL == gCamCapability[cameraId]) {
10722 rc = initCapabilities(cameraId);
10723 if (rc < 0) {
10724 pthread_mutex_unlock(&gCamLock);
10725 return rc;
10726 }
10727 }
10728
10729 if (NULL == gStaticMetadata[cameraId]) {
10730 rc = initStaticMetadata(cameraId);
10731 if (rc < 0) {
10732 pthread_mutex_unlock(&gCamLock);
10733 return rc;
10734 }
10735 }
10736
10737 switch(gCamCapability[cameraId]->position) {
10738 case CAM_POSITION_BACK:
10739 case CAM_POSITION_BACK_AUX:
10740 info->facing = CAMERA_FACING_BACK;
10741 break;
10742
10743 case CAM_POSITION_FRONT:
10744 case CAM_POSITION_FRONT_AUX:
10745 info->facing = CAMERA_FACING_FRONT;
10746 break;
10747
10748 default:
10749 LOGE("Unknown position type %d for camera id:%d",
10750 gCamCapability[cameraId]->position, cameraId);
10751 rc = -1;
10752 break;
10753 }
10754
10755
10756 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010757#ifndef USE_HAL_3_3
10758 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10759#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010760 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010761#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010762 info->static_camera_characteristics = gStaticMetadata[cameraId];
10763
10764 //For now assume both cameras can operate independently.
10765 info->conflicting_devices = NULL;
10766 info->conflicting_devices_length = 0;
10767
10768 //Resource cost is 100 * MIN(1.0, m/M),
10769 //where m is the throughput requirement with the maximum stream configuration
10770 //and M is the CPP's maximum throughput.
10771 float max_fps = 0.0;
10772 for (uint32_t i = 0;
10773 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10774 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10775 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10776 }
10777 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10778 gCamCapability[cameraId]->active_array_size.width *
10779 gCamCapability[cameraId]->active_array_size.height * max_fps /
10780 gCamCapability[cameraId]->max_pixel_bandwidth;
10781 info->resource_cost = 100 * MIN(1.0, ratio);
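    // Worked example (illustrative numbers only): with MAX_PROCESSED_STREAMS = 3,
    // a 4032x3024 active array, max_fps = 30 and max_pixel_bandwidth = 1.2e9,
    // ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, so resource_cost ~= 91.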
10782 LOGI("camera %d resource cost is %d", cameraId,
10783 info->resource_cost);
10784
10785 pthread_mutex_unlock(&gCamLock);
10786 return rc;
10787}
10788
10789/*===========================================================================
10790 * FUNCTION : translateCapabilityToMetadata
10791 *
10792 * DESCRIPTION: translate the capability into camera_metadata_t
10793 *
10794 * PARAMETERS : type of the request
10795 *
10796 *
10797 * RETURN : success: camera_metadata_t*
10798 * failure: NULL
10799 *
10800 *==========================================================================*/
10801camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10802{
10803 if (mDefaultMetadata[type] != NULL) {
10804 return mDefaultMetadata[type];
10805 }
10806 //first time we are handling this request
10807 //fill up the metadata structure using the wrapper class
10808 CameraMetadata settings;
10809 //translate from cam_capability_t to camera_metadata_tag_t
10810 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10811 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10812 int32_t defaultRequestID = 0;
10813 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10814
10815 /* OIS disable */
10816 char ois_prop[PROPERTY_VALUE_MAX];
10817 memset(ois_prop, 0, sizeof(ois_prop));
10818 property_get("persist.camera.ois.disable", ois_prop, "0");
10819 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10820
10821 /* Force video to use OIS */
10822 char videoOisProp[PROPERTY_VALUE_MAX];
10823 memset(videoOisProp, 0, sizeof(videoOisProp));
10824 property_get("persist.camera.ois.video", videoOisProp, "1");
10825 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010826
10827 // Hybrid AE enable/disable
10828 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10829 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10830 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10831 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10832
Thierry Strudel3d639192016-09-09 11:52:26 -070010833 uint8_t controlIntent = 0;
10834 uint8_t focusMode;
10835 uint8_t vsMode;
10836 uint8_t optStabMode;
10837 uint8_t cacMode;
10838 uint8_t edge_mode;
10839 uint8_t noise_red_mode;
10840 uint8_t tonemap_mode;
10841 bool highQualityModeEntryAvailable = FALSE;
10842 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010843 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010844 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10845 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010846 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010847 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010848 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010849
Thierry Strudel3d639192016-09-09 11:52:26 -070010850 switch (type) {
10851 case CAMERA3_TEMPLATE_PREVIEW:
10852 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10853 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10854 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10855 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10856 edge_mode = ANDROID_EDGE_MODE_FAST;
10857 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10858 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10859 break;
10860 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10861 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10862 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10863 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10864 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10865 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10866 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10867 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10868 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10869 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10870 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10871 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10872 highQualityModeEntryAvailable = TRUE;
10873 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10874 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10875 fastModeEntryAvailable = TRUE;
10876 }
10877 }
10878 if (highQualityModeEntryAvailable) {
10879 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10880 } else if (fastModeEntryAvailable) {
10881 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10882 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010883 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10884 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10885 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010886 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010887 break;
10888 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10889 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10890 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10891 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010892 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10893 edge_mode = ANDROID_EDGE_MODE_FAST;
10894 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10895 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10896 if (forceVideoOis)
10897 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10898 break;
10899 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10900 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10901 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10902 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010903 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10904 edge_mode = ANDROID_EDGE_MODE_FAST;
10905 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10906 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10907 if (forceVideoOis)
10908 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10909 break;
10910 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10911 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10912 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10913 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10914 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10915 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10916 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10917 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10918 break;
10919 case CAMERA3_TEMPLATE_MANUAL:
10920 edge_mode = ANDROID_EDGE_MODE_FAST;
10921 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10922 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10923 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10924 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10925 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10926 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10927 break;
10928 default:
10929 edge_mode = ANDROID_EDGE_MODE_FAST;
10930 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10931 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10932 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10933 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10934 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10935 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10936 break;
10937 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010938 // Set CAC to OFF if the underlying device doesn't support it
10939 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10940 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10941 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010942 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10943 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10944 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10945 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10946 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10947 }
10948 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010949 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010950 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010951
10952 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10953 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10954 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10955 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10956 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10957 || ois_disable)
10958 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10959 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010960 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010961
10962 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10963 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10964
10965 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10966 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10967
10968 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10969 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10970
10971 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10972 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10973
10974 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10975 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10976
10977 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10978 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10979
10980 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10981 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10982
10983 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10984 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10985
10986 /*flash*/
10987 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10988 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10989
10990 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10991 settings.update(ANDROID_FLASH_FIRING_POWER,
10992 &flashFiringLevel, 1);
10993
10994 /* lens */
10995 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10996 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10997
10998 if (gCamCapability[mCameraId]->filter_densities_count) {
10999 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11000 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11001 gCamCapability[mCameraId]->filter_densities_count);
11002 }
11003
11004 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11005 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11006
Thierry Strudel3d639192016-09-09 11:52:26 -070011007 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11008 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11009
11010 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11011 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11012
11013 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11014 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11015
11016 /* face detection (default to OFF) */
11017 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11018 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11019
Thierry Strudel54dc9782017-02-15 12:12:10 -080011020 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11021 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011022
11023 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11024 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11025
11026 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11027 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11028
Thierry Strudel3d639192016-09-09 11:52:26 -070011029
11030 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11031 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11032
11033 /* Exposure time(Update the Min Exposure Time)*/
11034 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11035 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11036
11037 /* frame duration */
11038 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11039 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11040
11041 /* sensitivity */
11042 static const int32_t default_sensitivity = 100;
11043 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011044#ifndef USE_HAL_3_3
11045 static const int32_t default_isp_sensitivity =
11046 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11047 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11048#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011049
11050 /*edge mode*/
11051 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11052
11053 /*noise reduction mode*/
11054 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11055
11056 /*color correction mode*/
11057 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11058 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11059
11060 /*transform matrix mode*/
11061 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11062
11063 int32_t scaler_crop_region[4];
11064 scaler_crop_region[0] = 0;
11065 scaler_crop_region[1] = 0;
11066 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11067 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11068 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11069
11070 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11071 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11072
11073 /*focus distance*/
11074 float focus_distance = 0.0;
11075 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11076
11077 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011078 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011079 float max_range = 0.0;
11080 float max_fixed_fps = 0.0;
11081 int32_t fps_range[2] = {0, 0};
11082 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11083 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011084 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11085 TEMPLATE_MAX_PREVIEW_FPS) {
11086 continue;
11087 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011088 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11089 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11090 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11091 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11092 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11093 if (range > max_range) {
11094 fps_range[0] =
11095 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11096 fps_range[1] =
11097 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11098 max_range = range;
11099 }
11100 } else {
11101 if (range < 0.01 && max_fixed_fps <
11102 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11103 fps_range[0] =
11104 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11105 fps_range[1] =
11106 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11107 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11108 }
11109 }
11110 }
11111 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
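    /* Worked example (illustrative ranges, assuming TEMPLATE_MAX_PREVIEW_FPS is 30
     * as the comment above suggests): given {[15,30], [30,30], [7.5,60]}, the
     * [7.5,60] entry is skipped, preview/still/ZSL templates pick the widest
     * remaining range [15,30], and video templates pick the highest fixed
     * range [30,30]. */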
11112
11113 /*precapture trigger*/
11114 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11115 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11116
11117 /*af trigger*/
11118 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11119 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11120
11121 /* ae & af regions */
11122 int32_t active_region[] = {
11123 gCamCapability[mCameraId]->active_array_size.left,
11124 gCamCapability[mCameraId]->active_array_size.top,
11125 gCamCapability[mCameraId]->active_array_size.left +
11126 gCamCapability[mCameraId]->active_array_size.width,
11127 gCamCapability[mCameraId]->active_array_size.top +
11128 gCamCapability[mCameraId]->active_array_size.height,
11129 0};
11130 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11131 sizeof(active_region) / sizeof(active_region[0]));
11132 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11133 sizeof(active_region) / sizeof(active_region[0]));
11134
11135 /* black level lock */
11136 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11137 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11138
Thierry Strudel3d639192016-09-09 11:52:26 -070011139 //special defaults for manual template
11140 if (type == CAMERA3_TEMPLATE_MANUAL) {
11141 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11142 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11143
11144 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11145 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11146
11147 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11148 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11149
11150 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11151 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11152
11153 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11154 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11155
11156 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11157 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11158 }
11159
11160
11161 /* TNR
11162 * This is where we decide for which templates TNR is enabled.
11163 * TNR is enabled if either the preview or the video stream requires it.
11164 * This is not to be confused with per-stream linking; that decision is
11165 * still made per session and is handled as part of stream configuration
11166 */
11167 uint8_t tnr_enable = 0;
11168
11169 if (m_bTnrPreview || m_bTnrVideo) {
11170
11171 switch (type) {
11172 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11173 tnr_enable = 1;
11174 break;
11175
11176 default:
11177 tnr_enable = 0;
11178 break;
11179 }
11180
11181 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11182 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11183 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11184
11185 LOGD("TNR:%d with process plate %d for template:%d",
11186 tnr_enable, tnr_process_type, type);
11187 }
11188
11189 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011190 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011191 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11192
Shuzhen Wang920ea402017-05-03 08:49:39 -070011193 uint8_t is_main = 0; // this doesn't matter as the app should overwrite it
Thierry Strudel3d639192016-09-09 11:52:26 -070011194 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11195
Shuzhen Wang920ea402017-05-03 08:49:39 -070011196 uint8_t related_camera_id = mCameraId;
11197 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011198
11199 /* CDS default */
11200 char prop[PROPERTY_VALUE_MAX];
11201 memset(prop, 0, sizeof(prop));
11202 property_get("persist.camera.CDS", prop, "Auto");
11203 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11204 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11205 if (CAM_CDS_MODE_MAX == cds_mode) {
11206 cds_mode = CAM_CDS_MODE_AUTO;
11207 }
11208
11209 /* Disabling CDS in templates which have TNR enabled*/
11210 if (tnr_enable)
11211 cds_mode = CAM_CDS_MODE_OFF;
11212
11213 int32_t mode = cds_mode;
11214 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011215
Thierry Strudel269c81a2016-10-12 12:13:59 -070011216 /* Manual Convergence AEC Speed is disabled by default*/
11217 float default_aec_speed = 0;
11218 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11219
11220 /* Manual Convergence AWB Speed is disabled by default*/
11221 float default_awb_speed = 0;
11222 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11223
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011224 // Set instant AEC to normal convergence by default
11225 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11226 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11227
Shuzhen Wang19463d72016-03-08 11:09:52 -080011228 /* hybrid ae */
11229 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11230
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011231 if (gExposeEnableZslKey) {
11232 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11233 }
11234
Thierry Strudel3d639192016-09-09 11:52:26 -070011235 mDefaultMetadata[type] = settings.release();
11236
11237 return mDefaultMetadata[type];
11238}
11239
11240/*===========================================================================
11241 * FUNCTION : setFrameParameters
11242 *
11243 * DESCRIPTION: set parameters per frame as requested in the metadata from
11244 * framework
11245 *
11246 * PARAMETERS :
11247 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011248 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011249 * @blob_request: Whether this request is a blob request or not
11250 *
11251 * RETURN : success: NO_ERROR
11252 * failure:
11253 *==========================================================================*/
11254int QCamera3HardwareInterface::setFrameParameters(
11255 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011256 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011257 int blob_request,
11258 uint32_t snapshotStreamId)
11259{
11260 /*translate from camera_metadata_t type to parm_type_t*/
11261 int rc = 0;
11262 int32_t hal_version = CAM_HAL_V3;
11263
11264 clear_metadata_buffer(mParameters);
11265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11266 LOGE("Failed to set hal version in the parameters");
11267 return BAD_VALUE;
11268 }
11269
11270 /*we need to update the frame number in the parameters*/
11271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11272 request->frame_number)) {
11273 LOGE("Failed to set the frame number in the parameters");
11274 return BAD_VALUE;
11275 }
11276
11277 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011278 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011279 LOGE("Failed to set stream type mask in the parameters");
11280 return BAD_VALUE;
11281 }
11282
11283 if (mUpdateDebugLevel) {
11284 uint32_t dummyDebugLevel = 0;
11285 /* The value of dummyDebugLevel is irrelevant. On
11286 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read the debug property */
11287 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11288 dummyDebugLevel)) {
11289 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11290 return BAD_VALUE;
11291 }
11292 mUpdateDebugLevel = false;
11293 }
11294
11295 if(request->settings != NULL){
11296 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11297 if (blob_request)
11298 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11299 }
11300
11301 return rc;
11302}
11303
11304/*===========================================================================
11305 * FUNCTION : setReprocParameters
11306 *
11307 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11308 * return it.
11309 *
11310 * PARAMETERS :
11311 * @request : request that needs to be serviced
11312 *
11313 * RETURN : success: NO_ERROR
11314 * failure:
11315 *==========================================================================*/
11316int32_t QCamera3HardwareInterface::setReprocParameters(
11317 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11318 uint32_t snapshotStreamId)
11319{
11320 /*translate from camera_metadata_t type to parm_type_t*/
11321 int rc = 0;
11322
11323 if (NULL == request->settings){
11324 LOGE("Reprocess settings cannot be NULL");
11325 return BAD_VALUE;
11326 }
11327
11328 if (NULL == reprocParam) {
11329 LOGE("Invalid reprocessing metadata buffer");
11330 return BAD_VALUE;
11331 }
11332 clear_metadata_buffer(reprocParam);
11333
11334 /*we need to update the frame number in the parameters*/
11335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11336 request->frame_number)) {
11337 LOGE("Failed to set the frame number in the parameters");
11338 return BAD_VALUE;
11339 }
11340
11341 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11342 if (rc < 0) {
11343 LOGE("Failed to translate reproc request");
11344 return rc;
11345 }
11346
11347 CameraMetadata frame_settings;
11348 frame_settings = request->settings;
11349 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11350 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11351 int32_t *crop_count =
11352 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11353 int32_t *crop_data =
11354 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11355 int32_t *roi_map =
11356 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11357 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11358 cam_crop_data_t crop_meta;
11359 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11360 crop_meta.num_of_streams = 1;
11361 crop_meta.crop_info[0].crop.left = crop_data[0];
11362 crop_meta.crop_info[0].crop.top = crop_data[1];
11363 crop_meta.crop_info[0].crop.width = crop_data[2];
11364 crop_meta.crop_info[0].crop.height = crop_data[3];
11365
11366 crop_meta.crop_info[0].roi_map.left =
11367 roi_map[0];
11368 crop_meta.crop_info[0].roi_map.top =
11369 roi_map[1];
11370 crop_meta.crop_info[0].roi_map.width =
11371 roi_map[2];
11372 crop_meta.crop_info[0].roi_map.height =
11373 roi_map[3];
11374
11375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11376 rc = BAD_VALUE;
11377 }
11378 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11379 request->input_buffer->stream,
11380 crop_meta.crop_info[0].crop.left,
11381 crop_meta.crop_info[0].crop.top,
11382 crop_meta.crop_info[0].crop.width,
11383 crop_meta.crop_info[0].crop.height);
11384 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11385 request->input_buffer->stream,
11386 crop_meta.crop_info[0].roi_map.left,
11387 crop_meta.crop_info[0].roi_map.top,
11388 crop_meta.crop_info[0].roi_map.width,
11389 crop_meta.crop_info[0].roi_map.height);
11390 } else {
11391 LOGE("Invalid reprocess crop count %d!", *crop_count);
11392 }
11393 } else {
11394 LOGE("No crop data from matching output stream");
11395 }
11396
11397 /* These settings are not needed for regular requests so handle them specially for
11398 reprocess requests; information needed for EXIF tags */
11399 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11400 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11401 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11402 if (NAME_NOT_FOUND != val) {
11403 uint32_t flashMode = (uint32_t)val;
11404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11405 rc = BAD_VALUE;
11406 }
11407 } else {
11408 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11409 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11410 }
11411 } else {
11412 LOGH("No flash mode in reprocess settings");
11413 }
11414
11415 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11416 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11418 rc = BAD_VALUE;
11419 }
11420 } else {
11421 LOGH("No flash state in reprocess settings");
11422 }
11423
11424 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11425 uint8_t *reprocessFlags =
11426 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11427 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11428 *reprocessFlags)) {
11429 rc = BAD_VALUE;
11430 }
11431 }
11432
Thierry Strudel54dc9782017-02-15 12:12:10 -080011433 // Add exif debug data to internal metadata
11434 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11435 mm_jpeg_debug_exif_params_t *debug_params =
11436 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11437 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11438 // AE
11439 if (debug_params->ae_debug_params_valid == TRUE) {
11440 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11441 debug_params->ae_debug_params);
11442 }
11443 // AWB
11444 if (debug_params->awb_debug_params_valid == TRUE) {
11445 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11446 debug_params->awb_debug_params);
11447 }
11448 // AF
11449 if (debug_params->af_debug_params_valid == TRUE) {
11450 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11451 debug_params->af_debug_params);
11452 }
11453 // ASD
11454 if (debug_params->asd_debug_params_valid == TRUE) {
11455 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11456 debug_params->asd_debug_params);
11457 }
11458 // Stats
11459 if (debug_params->stats_debug_params_valid == TRUE) {
11460 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11461 debug_params->stats_debug_params);
11462 }
11463 // BE Stats
11464 if (debug_params->bestats_debug_params_valid == TRUE) {
11465 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11466 debug_params->bestats_debug_params);
11467 }
11468 // BHIST
11469 if (debug_params->bhist_debug_params_valid == TRUE) {
11470 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11471 debug_params->bhist_debug_params);
11472 }
11473 // 3A Tuning
11474 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11475 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11476 debug_params->q3a_tuning_debug_params);
11477 }
11478 }
11479
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011480 // Add metadata which reprocess needs
11481 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11482 cam_reprocess_info_t *repro_info =
11483 (cam_reprocess_info_t *)frame_settings.find
11484 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011485 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011486 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011487 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011488 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011489 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011490 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011491 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011492 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011493 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011494 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011495 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011496 repro_info->pipeline_flip);
11497 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11498 repro_info->af_roi);
11499 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11500 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011501 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11502 CAM_INTF_PARM_ROTATION metadata has already been added in
11503 translateToHalMetadata and HAL needs to keep this new rotation
11504 metadata. Otherwise, the old rotation info saved in the vendor tag
11505 would be used */
11506 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11507 CAM_INTF_PARM_ROTATION, reprocParam) {
11508 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11509 } else {
11510 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011511 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011512 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011513 }
11514
11515 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11516 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11517 roi.width and roi.height are the final JPEG size.
11518 For now, HAL only checks this for reprocess requests */
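    /* Illustrative sketch (hypothetical values, not taken from this file): an app
     * requesting a crop plus downscale to 1080p during reprocess would populate
     * these vendor tags roughly as follows:
     *
     *   uint8_t enable = 1;
     *   int32_t cropRect[4] = {256, 192, 3520, 2640};  // left, top, width, height
     *   int32_t cropRoi[4]  = {0, 0, 1920, 1080};      // roi[2]/roi[3] = final JPEG size
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
     *   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
     */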
11519 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11520 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11521 uint8_t *enable =
11522 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11523 if (*enable == TRUE) {
11524 int32_t *crop_data =
11525 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11526 cam_stream_crop_info_t crop_meta;
11527 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11528 crop_meta.stream_id = 0;
11529 crop_meta.crop.left = crop_data[0];
11530 crop_meta.crop.top = crop_data[1];
11531 crop_meta.crop.width = crop_data[2];
11532 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011533 // The JPEG crop roi should match cpp output size
11534 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11535 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11536 crop_meta.roi_map.left = 0;
11537 crop_meta.roi_map.top = 0;
11538 crop_meta.roi_map.width = cpp_crop->crop.width;
11539 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011540 }
11541 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11542 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011543 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011544 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011545 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11546 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011547 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011548 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11549
11550 // Add JPEG scale information
11551 cam_dimension_t scale_dim;
11552 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11553 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11554 int32_t *roi =
11555 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11556 scale_dim.width = roi[2];
11557 scale_dim.height = roi[3];
11558 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11559 scale_dim);
11560 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11561 scale_dim.width, scale_dim.height, mCameraId);
11562 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011563 }
11564 }
11565
11566 return rc;
11567}
11568
11569/*===========================================================================
11570 * FUNCTION : saveRequestSettings
11571 *
11572 * DESCRIPTION: Add any settings that might have changed to the request settings
11573 * and save the settings to be applied on the frame
11574 *
11575 * PARAMETERS :
11576 * @jpegMetadata : the extracted and/or modified jpeg metadata
11577 * @request : request with initial settings
11578 *
11579 * RETURN :
11580 * camera_metadata_t* : pointer to the saved request settings
11581 *==========================================================================*/
11582camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11583 const CameraMetadata &jpegMetadata,
11584 camera3_capture_request_t *request)
11585{
11586 camera_metadata_t *resultMetadata;
11587 CameraMetadata camMetadata;
11588 camMetadata = request->settings;
11589
11590 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11591 int32_t thumbnail_size[2];
11592 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11593 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11594 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11595 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11596 }
11597
11598 if (request->input_buffer != NULL) {
11599 uint8_t reprocessFlags = 1;
11600 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11601 (uint8_t*)&reprocessFlags,
11602 sizeof(reprocessFlags));
11603 }
11604
11605 resultMetadata = camMetadata.release();
11606 return resultMetadata;
11607}
11608
11609/*===========================================================================
11610 * FUNCTION : setHalFpsRange
11611 *
11612 * DESCRIPTION: set FPS range parameter
11613 *
11614 *
11615 * PARAMETERS :
11616 * @settings : Metadata from framework
11617 * @hal_metadata: Metadata buffer
11618 *
11619 *
11620 * RETURN : success: NO_ERROR
11621 * failure:
11622 *==========================================================================*/
11623int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11624 metadata_buffer_t *hal_metadata)
11625{
11626 int32_t rc = NO_ERROR;
11627 cam_fps_range_t fps_range;
11628 fps_range.min_fps = (float)
11629 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11630 fps_range.max_fps = (float)
11631 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11632 fps_range.video_min_fps = fps_range.min_fps;
11633 fps_range.video_max_fps = fps_range.max_fps;
11634
11635 LOGD("aeTargetFpsRange fps: [%f %f]",
11636 fps_range.min_fps, fps_range.max_fps);
11637 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11638 * follows:
11639 * ---------------------------------------------------------------|
11640 * Video stream is absent in configure_streams |
11641 * (Camcorder preview before the first video record |
11642 * ---------------------------------------------------------------|
11643 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11644 * | | | vid_min/max_fps|
11645 * ---------------------------------------------------------------|
11646 * NO | [ 30, 240] | 240 | [240, 240] |
11647 * |-------------|-------------|----------------|
11648 * | [240, 240] | 240 | [240, 240] |
11649 * ---------------------------------------------------------------|
11650 * Video stream is present in configure_streams |
11651 * ---------------------------------------------------------------|
11652 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11653 * | | | vid_min/max_fps|
11654 * ---------------------------------------------------------------|
11655 * NO | [ 30, 240] | 240 | [240, 240] |
11656 * (camcorder prev |-------------|-------------|----------------|
11657 * after video rec | [240, 240] | 240 | [240, 240] |
11658 * is stopped) | | | |
11659 * ---------------------------------------------------------------|
11660 * YES | [ 30, 240] | 240 | [240, 240] |
11661 * |-------------|-------------|----------------|
11662 * | [240, 240] | 240 | [240, 240] |
11663 * ---------------------------------------------------------------|
11664 * When Video stream is absent in configure_streams,
11665 * preview fps = sensor_fps / batchsize
11666 * Eg: for 240fps at batchSize 4, preview = 60fps
11667 * for 120fps at batchSize 4, preview = 30fps
11668 *
11669 * When video stream is present in configure_streams, preview fps is as per
11670 * the ratio of preview buffers to video buffers requested in process
11671 * capture request
11672 */
11673 mBatchSize = 0;
11674 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11675 fps_range.min_fps = fps_range.video_max_fps;
11676 fps_range.video_min_fps = fps_range.video_max_fps;
11677 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11678 fps_range.max_fps);
11679 if (NAME_NOT_FOUND != val) {
11680 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11681 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11682 return BAD_VALUE;
11683 }
11684
11685 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11686 /* If batchmode is currently in progress and the fps changes,
11687 * set the flag to restart the sensor */
11688 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11689 (mHFRVideoFps != fps_range.max_fps)) {
11690 mNeedSensorRestart = true;
11691 }
11692 mHFRVideoFps = fps_range.max_fps;
11693 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11694 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11695 mBatchSize = MAX_HFR_BATCH_SIZE;
11696 }
11697 }
11698 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11699
11700 }
11701 } else {
11702 /* HFR mode is session param in backend/ISP. This should be reset when
11703 * in non-HFR mode */
11704 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11706 return BAD_VALUE;
11707 }
11708 }
11709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11710 return BAD_VALUE;
11711 }
11712 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11713 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11714 return rc;
11715}
11716
11717/*===========================================================================
11718 * FUNCTION : translateToHalMetadata
11719 *
11720 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11721 *
11722 *
11723 * PARAMETERS :
11724 * @request : request sent from framework
11725 *
11726 *
11727 * RETURN : success: NO_ERROR
11728 * failure:
11729 *==========================================================================*/
11730int QCamera3HardwareInterface::translateToHalMetadata
11731 (const camera3_capture_request_t *request,
11732 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011733 uint32_t snapshotStreamId) {
11734 if (request == nullptr || hal_metadata == nullptr) {
11735 return BAD_VALUE;
11736 }
11737
11738 int64_t minFrameDuration = getMinFrameDuration(request);
11739
11740 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11741 minFrameDuration);
11742}
11743
11744int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11745 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11746 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11747
Thierry Strudel3d639192016-09-09 11:52:26 -070011748 int rc = 0;
11749 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011750 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011751
11752 /* Do not change the order of the following list unless you know what you are
11753 * doing.
11754 * The order is laid out in such a way that parameters in the front of the table
11755 * may be used to override the parameters later in the table. Examples are:
11756 * 1. META_MODE should precede AEC/AWB/AF MODE
 11757 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11758 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
 11759 * 4. Any mode should precede its corresponding settings
11760 */
11761 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11762 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11764 rc = BAD_VALUE;
11765 }
11766 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11767 if (rc != NO_ERROR) {
11768 LOGE("extractSceneMode failed");
11769 }
11770 }
11771
11772 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11773 uint8_t fwk_aeMode =
11774 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11775 uint8_t aeMode;
11776 int32_t redeye;
11777
11778 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11779 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011780 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11781 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011782 } else {
11783 aeMode = CAM_AE_MODE_ON;
11784 }
11785 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11786 redeye = 1;
11787 } else {
11788 redeye = 0;
11789 }
11790
11791 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11792 fwk_aeMode);
11793 if (NAME_NOT_FOUND != val) {
11794 int32_t flashMode = (int32_t)val;
11795 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11796 }
11797
11798 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11800 rc = BAD_VALUE;
11801 }
11802 }
11803
11804 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11805 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11806 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11807 fwk_whiteLevel);
11808 if (NAME_NOT_FOUND != val) {
11809 uint8_t whiteLevel = (uint8_t)val;
11810 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11811 rc = BAD_VALUE;
11812 }
11813 }
11814 }
11815
11816 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11817 uint8_t fwk_cacMode =
11818 frame_settings.find(
11819 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11820 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11821 fwk_cacMode);
11822 if (NAME_NOT_FOUND != val) {
11823 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11824 bool entryAvailable = FALSE;
11825 // Check whether Frameworks set CAC mode is supported in device or not
11826 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11827 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11828 entryAvailable = TRUE;
11829 break;
11830 }
11831 }
11832 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11833 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11834 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11835 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11836 if (entryAvailable == FALSE) {
11837 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11838 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11839 } else {
11840 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
 11841 // High is not supported, so set FAST: the spec says the underlying
 11842 // device implementation can be the same for both modes.
11843 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11844 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
 11845 // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
 11846 // to avoid the fps drop caused by high quality processing
11847 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11848 } else {
11849 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11850 }
11851 }
11852 }
11853 LOGD("Final cacMode is %d", cacMode);
11854 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11855 rc = BAD_VALUE;
11856 }
11857 } else {
11858 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11859 }
11860 }
11861
Thierry Strudel2896d122017-02-23 19:18:03 -080011862 char af_value[PROPERTY_VALUE_MAX];
11863 property_get("persist.camera.af.infinity", af_value, "0");
11864
Jason Lee84ae9972017-02-24 13:24:24 -080011865 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011866 if (atoi(af_value) == 0) {
11867 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011868 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011869 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11870 fwk_focusMode);
11871 if (NAME_NOT_FOUND != val) {
11872 uint8_t focusMode = (uint8_t)val;
11873 LOGD("set focus mode %d", focusMode);
11874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11875 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11876 rc = BAD_VALUE;
11877 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011878 }
11879 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011880 } else {
11881 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11882 LOGE("Focus forced to infinity %d", focusMode);
11883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11884 rc = BAD_VALUE;
11885 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011886 }
11887
Jason Lee84ae9972017-02-24 13:24:24 -080011888 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11889 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011890 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11892 focalDistance)) {
11893 rc = BAD_VALUE;
11894 }
11895 }
11896
11897 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11898 uint8_t fwk_antibandingMode =
11899 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11900 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11901 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11902 if (NAME_NOT_FOUND != val) {
11903 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011904 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11905 if (m60HzZone) {
11906 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11907 } else {
11908 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11909 }
11910 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011911 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11912 hal_antibandingMode)) {
11913 rc = BAD_VALUE;
11914 }
11915 }
11916 }
11917
11918 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11919 int32_t expCompensation = frame_settings.find(
11920 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11921 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11922 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11923 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11924 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011925 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11927 expCompensation)) {
11928 rc = BAD_VALUE;
11929 }
11930 }
11931
11932 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11933 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11935 rc = BAD_VALUE;
11936 }
11937 }
11938 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11939 rc = setHalFpsRange(frame_settings, hal_metadata);
11940 if (rc != NO_ERROR) {
11941 LOGE("setHalFpsRange failed");
11942 }
11943 }
11944
11945 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11946 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11948 rc = BAD_VALUE;
11949 }
11950 }
11951
11952 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11953 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11954 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11955 fwk_effectMode);
11956 if (NAME_NOT_FOUND != val) {
11957 uint8_t effectMode = (uint8_t)val;
11958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11959 rc = BAD_VALUE;
11960 }
11961 }
11962 }
11963
11964 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11965 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11967 colorCorrectMode)) {
11968 rc = BAD_VALUE;
11969 }
11970 }
11971
11972 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11973 cam_color_correct_gains_t colorCorrectGains;
11974 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11975 colorCorrectGains.gains[i] =
11976 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11977 }
11978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11979 colorCorrectGains)) {
11980 rc = BAD_VALUE;
11981 }
11982 }
11983
11984 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11985 cam_color_correct_matrix_t colorCorrectTransform;
11986 cam_rational_type_t transform_elem;
11987 size_t num = 0;
11988 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11989 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11990 transform_elem.numerator =
11991 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11992 transform_elem.denominator =
11993 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11994 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11995 num++;
11996 }
11997 }
11998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11999 colorCorrectTransform)) {
12000 rc = BAD_VALUE;
12001 }
12002 }
12003
12004 cam_trigger_t aecTrigger;
12005 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12006 aecTrigger.trigger_id = -1;
12007 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12008 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12009 aecTrigger.trigger =
12010 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12011 aecTrigger.trigger_id =
12012 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12014 aecTrigger)) {
12015 rc = BAD_VALUE;
12016 }
12017 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12018 aecTrigger.trigger, aecTrigger.trigger_id);
12019 }
12020
12021 /*af_trigger must come with a trigger id*/
12022 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12023 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12024 cam_trigger_t af_trigger;
12025 af_trigger.trigger =
12026 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12027 af_trigger.trigger_id =
12028 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12030 rc = BAD_VALUE;
12031 }
12032 LOGD("AfTrigger: %d AfTriggerID: %d",
12033 af_trigger.trigger, af_trigger.trigger_id);
12034 }
12035
12036 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12037 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12039 rc = BAD_VALUE;
12040 }
12041 }
12042 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12043 cam_edge_application_t edge_application;
12044 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012045
Thierry Strudel3d639192016-09-09 11:52:26 -070012046 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12047 edge_application.sharpness = 0;
12048 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012049 edge_application.sharpness =
12050 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12051 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12052 int32_t sharpness =
12053 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12054 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12055 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12056 LOGD("Setting edge mode sharpness %d", sharpness);
12057 edge_application.sharpness = sharpness;
12058 }
12059 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012060 }
12061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12062 rc = BAD_VALUE;
12063 }
12064 }
12065
12066 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12067 int32_t respectFlashMode = 1;
12068 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12069 uint8_t fwk_aeMode =
12070 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012071 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12072 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12073 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012074 respectFlashMode = 0;
12075 LOGH("AE Mode controls flash, ignore android.flash.mode");
12076 }
12077 }
12078 if (respectFlashMode) {
12079 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12080 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12081 LOGH("flash mode after mapping %d", val);
12082 // To check: CAM_INTF_META_FLASH_MODE usage
12083 if (NAME_NOT_FOUND != val) {
12084 uint8_t flashMode = (uint8_t)val;
12085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12086 rc = BAD_VALUE;
12087 }
12088 }
12089 }
12090 }
12091
12092 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12093 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12095 rc = BAD_VALUE;
12096 }
12097 }
12098
12099 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12100 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12102 flashFiringTime)) {
12103 rc = BAD_VALUE;
12104 }
12105 }
12106
12107 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12108 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12110 hotPixelMode)) {
12111 rc = BAD_VALUE;
12112 }
12113 }
12114
12115 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12116 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12118 lensAperture)) {
12119 rc = BAD_VALUE;
12120 }
12121 }
12122
12123 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12124 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12125 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12126 filterDensity)) {
12127 rc = BAD_VALUE;
12128 }
12129 }
12130
12131 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12132 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12134 focalLength)) {
12135 rc = BAD_VALUE;
12136 }
12137 }
12138
12139 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12140 uint8_t optStabMode =
12141 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12143 optStabMode)) {
12144 rc = BAD_VALUE;
12145 }
12146 }
12147
12148 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12149 uint8_t videoStabMode =
12150 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12151 LOGD("videoStabMode from APP = %d", videoStabMode);
12152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12153 videoStabMode)) {
12154 rc = BAD_VALUE;
12155 }
12156 }
12157
12158
12159 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12160 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12162 noiseRedMode)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12168 float reprocessEffectiveExposureFactor =
12169 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12171 reprocessEffectiveExposureFactor)) {
12172 rc = BAD_VALUE;
12173 }
12174 }
12175
12176 cam_crop_region_t scalerCropRegion;
12177 bool scalerCropSet = false;
12178 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12179 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12180 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12181 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12182 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12183
12184 // Map coordinate system from active array to sensor output.
12185 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12186 scalerCropRegion.width, scalerCropRegion.height);
12187
12188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12189 scalerCropRegion)) {
12190 rc = BAD_VALUE;
12191 }
12192 scalerCropSet = true;
12193 }
12194
12195 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12196 int64_t sensorExpTime =
12197 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12198 LOGD("setting sensorExpTime %lld", sensorExpTime);
12199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12200 sensorExpTime)) {
12201 rc = BAD_VALUE;
12202 }
12203 }
12204
12205 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12206 int64_t sensorFrameDuration =
12207 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012208 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12209 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12210 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12211 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12213 sensorFrameDuration)) {
12214 rc = BAD_VALUE;
12215 }
12216 }
12217
12218 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12219 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12220 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12221 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12222 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12223 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12224 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12226 sensorSensitivity)) {
12227 rc = BAD_VALUE;
12228 }
12229 }
12230
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012231#ifndef USE_HAL_3_3
12232 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12233 int32_t ispSensitivity =
12234 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12235 if (ispSensitivity <
12236 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12237 ispSensitivity =
12238 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12239 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12240 }
12241 if (ispSensitivity >
12242 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12243 ispSensitivity =
12244 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12245 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12246 }
12247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12248 ispSensitivity)) {
12249 rc = BAD_VALUE;
12250 }
12251 }
12252#endif
12253
Thierry Strudel3d639192016-09-09 11:52:26 -070012254 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12255 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12257 rc = BAD_VALUE;
12258 }
12259 }
12260
12261 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12262 uint8_t fwk_facedetectMode =
12263 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12264
12265 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12266 fwk_facedetectMode);
12267
12268 if (NAME_NOT_FOUND != val) {
12269 uint8_t facedetectMode = (uint8_t)val;
12270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12271 facedetectMode)) {
12272 rc = BAD_VALUE;
12273 }
12274 }
12275 }
12276
Thierry Strudel54dc9782017-02-15 12:12:10 -080012277 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012278 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012279 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12281 histogramMode)) {
12282 rc = BAD_VALUE;
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12287 uint8_t sharpnessMapMode =
12288 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12290 sharpnessMapMode)) {
12291 rc = BAD_VALUE;
12292 }
12293 }
12294
12295 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12296 uint8_t tonemapMode =
12297 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12298 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12299 rc = BAD_VALUE;
12300 }
12301 }
 12302 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
 12303 /* All tonemap channels will have the same number of points */
12304 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12305 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12306 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12307 cam_rgb_tonemap_curves tonemapCurves;
12308 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12309 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12310 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12311 tonemapCurves.tonemap_points_cnt,
12312 CAM_MAX_TONEMAP_CURVE_SIZE);
12313 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12314 }
12315
12316 /* ch0 = G*/
12317 size_t point = 0;
12318 cam_tonemap_curve_t tonemapCurveGreen;
12319 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12320 for (size_t j = 0; j < 2; j++) {
12321 tonemapCurveGreen.tonemap_points[i][j] =
12322 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12323 point++;
12324 }
12325 }
12326 tonemapCurves.curves[0] = tonemapCurveGreen;
12327
12328 /* ch 1 = B */
12329 point = 0;
12330 cam_tonemap_curve_t tonemapCurveBlue;
12331 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12332 for (size_t j = 0; j < 2; j++) {
12333 tonemapCurveBlue.tonemap_points[i][j] =
12334 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12335 point++;
12336 }
12337 }
12338 tonemapCurves.curves[1] = tonemapCurveBlue;
12339
12340 /* ch 2 = R */
12341 point = 0;
12342 cam_tonemap_curve_t tonemapCurveRed;
12343 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12344 for (size_t j = 0; j < 2; j++) {
12345 tonemapCurveRed.tonemap_points[i][j] =
12346 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12347 point++;
12348 }
12349 }
12350 tonemapCurves.curves[2] = tonemapCurveRed;
12351
12352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12353 tonemapCurves)) {
12354 rc = BAD_VALUE;
12355 }
12356 }
12357
12358 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12359 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12361 captureIntent)) {
12362 rc = BAD_VALUE;
12363 }
12364 }
12365
12366 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12367 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12369 blackLevelLock)) {
12370 rc = BAD_VALUE;
12371 }
12372 }
12373
12374 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12375 uint8_t lensShadingMapMode =
12376 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12378 lensShadingMapMode)) {
12379 rc = BAD_VALUE;
12380 }
12381 }
12382
12383 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12384 cam_area_t roi;
12385 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012386 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012387
12388 // Map coordinate system from active array to sensor output.
12389 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12390 roi.rect.height);
12391
12392 if (scalerCropSet) {
12393 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12394 }
12395 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12396 rc = BAD_VALUE;
12397 }
12398 }
12399
12400 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12401 cam_area_t roi;
12402 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012403 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012404
12405 // Map coordinate system from active array to sensor output.
12406 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12407 roi.rect.height);
12408
12409 if (scalerCropSet) {
12410 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12411 }
12412 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12413 rc = BAD_VALUE;
12414 }
12415 }
12416
12417 // CDS for non-HFR non-video mode
12418 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12419 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12420 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12421 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12422 LOGE("Invalid CDS mode %d!", *fwk_cds);
12423 } else {
12424 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12425 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12426 rc = BAD_VALUE;
12427 }
12428 }
12429 }
12430
Thierry Strudel04e026f2016-10-10 11:27:36 -070012431 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012432 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012433 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012434 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12435 }
12436 if (m_bVideoHdrEnabled)
12437 vhdr = CAM_VIDEO_HDR_MODE_ON;
12438
Thierry Strudel54dc9782017-02-15 12:12:10 -080012439 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12440
 12441 if (vhdr != curr_hdr_state)
 12442 LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12443
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012444 rc = setVideoHdrMode(mParameters, vhdr);
12445 if (rc != NO_ERROR) {
12446 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012447 }
12448
12449 //IR
12450 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12451 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12452 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012453 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12454 uint8_t isIRon = 0;
12455
 12456 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012457 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12458 LOGE("Invalid IR mode %d!", fwk_ir);
12459 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012460 if (isIRon != curr_ir_state)
 12461 LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12462
Thierry Strudel04e026f2016-10-10 11:27:36 -070012463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12464 CAM_INTF_META_IR_MODE, fwk_ir)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468 }
12469
Thierry Strudel54dc9782017-02-15 12:12:10 -080012470 //Binning Correction Mode
12471 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12472 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12473 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12474 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12475 || (0 > fwk_binning_correction)) {
12476 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12477 } else {
12478 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12479 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12480 rc = BAD_VALUE;
12481 }
12482 }
12483 }
12484
Thierry Strudel269c81a2016-10-12 12:13:59 -070012485 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12486 float aec_speed;
12487 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12488 LOGD("AEC Speed :%f", aec_speed);
12489 if ( aec_speed < 0 ) {
12490 LOGE("Invalid AEC mode %f!", aec_speed);
12491 } else {
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12493 aec_speed)) {
12494 rc = BAD_VALUE;
12495 }
12496 }
12497 }
12498
12499 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12500 float awb_speed;
12501 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12502 LOGD("AWB Speed :%f", awb_speed);
12503 if ( awb_speed < 0 ) {
12504 LOGE("Invalid AWB mode %f!", awb_speed);
12505 } else {
12506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12507 awb_speed)) {
12508 rc = BAD_VALUE;
12509 }
12510 }
12511 }
12512
Thierry Strudel3d639192016-09-09 11:52:26 -070012513 // TNR
12514 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12515 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12516 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012517 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012518 cam_denoise_param_t tnr;
12519 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12520 tnr.process_plates =
12521 (cam_denoise_process_type_t)frame_settings.find(
12522 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12523 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012524
 12525 if (b_TnrRequested != curr_tnr_state)
 12526 LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12527
Thierry Strudel3d639192016-09-09 11:52:26 -070012528 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12529 rc = BAD_VALUE;
12530 }
12531 }
12532
Thierry Strudel54dc9782017-02-15 12:12:10 -080012533 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012534 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012535 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012536 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12537 *exposure_metering_mode)) {
12538 rc = BAD_VALUE;
12539 }
12540 }
12541
Thierry Strudel3d639192016-09-09 11:52:26 -070012542 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12543 int32_t fwk_testPatternMode =
12544 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12545 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12546 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12547
12548 if (NAME_NOT_FOUND != testPatternMode) {
12549 cam_test_pattern_data_t testPatternData;
12550 memset(&testPatternData, 0, sizeof(testPatternData));
12551 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12552 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12553 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12554 int32_t *fwk_testPatternData =
12555 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12556 testPatternData.r = fwk_testPatternData[0];
12557 testPatternData.b = fwk_testPatternData[3];
12558 switch (gCamCapability[mCameraId]->color_arrangement) {
12559 case CAM_FILTER_ARRANGEMENT_RGGB:
12560 case CAM_FILTER_ARRANGEMENT_GRBG:
12561 testPatternData.gr = fwk_testPatternData[1];
12562 testPatternData.gb = fwk_testPatternData[2];
12563 break;
12564 case CAM_FILTER_ARRANGEMENT_GBRG:
12565 case CAM_FILTER_ARRANGEMENT_BGGR:
12566 testPatternData.gr = fwk_testPatternData[2];
12567 testPatternData.gb = fwk_testPatternData[1];
12568 break;
12569 default:
12570 LOGE("color arrangement %d is not supported",
12571 gCamCapability[mCameraId]->color_arrangement);
12572 break;
12573 }
12574 }
12575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12576 testPatternData)) {
12577 rc = BAD_VALUE;
12578 }
12579 } else {
12580 LOGE("Invalid framework sensor test pattern mode %d",
12581 fwk_testPatternMode);
12582 }
12583 }
12584
12585 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12586 size_t count = 0;
12587 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12588 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12589 gps_coords.data.d, gps_coords.count, count);
12590 if (gps_coords.count != count) {
12591 rc = BAD_VALUE;
12592 }
12593 }
12594
12595 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12596 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12597 size_t count = 0;
12598 const char *gps_methods_src = (const char *)
12599 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12600 memset(gps_methods, '\0', sizeof(gps_methods));
12601 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12602 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12603 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12604 if (GPS_PROCESSING_METHOD_SIZE != count) {
12605 rc = BAD_VALUE;
12606 }
12607 }
12608
12609 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12610 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12612 gps_timestamp)) {
12613 rc = BAD_VALUE;
12614 }
12615 }
12616
12617 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12618 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12619 cam_rotation_info_t rotation_info;
12620 if (orientation == 0) {
12621 rotation_info.rotation = ROTATE_0;
12622 } else if (orientation == 90) {
12623 rotation_info.rotation = ROTATE_90;
12624 } else if (orientation == 180) {
12625 rotation_info.rotation = ROTATE_180;
12626 } else if (orientation == 270) {
12627 rotation_info.rotation = ROTATE_270;
12628 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012629 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012630 rotation_info.streamId = snapshotStreamId;
12631 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12632 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12633 rc = BAD_VALUE;
12634 }
12635 }
12636
12637 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12638 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12640 rc = BAD_VALUE;
12641 }
12642 }
12643
12644 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12645 uint32_t thumb_quality = (uint32_t)
12646 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12647 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12648 thumb_quality)) {
12649 rc = BAD_VALUE;
12650 }
12651 }
12652
12653 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12654 cam_dimension_t dim;
12655 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12656 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12657 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12658 rc = BAD_VALUE;
12659 }
12660 }
12661
12662 // Internal metadata
12663 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12664 size_t count = 0;
12665 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12666 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12667 privatedata.data.i32, privatedata.count, count);
12668 if (privatedata.count != count) {
12669 rc = BAD_VALUE;
12670 }
12671 }
12672
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012673 // ISO/Exposure Priority
12674 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12675 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12676 cam_priority_mode_t mode =
12677 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12678 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12679 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12680 use_iso_exp_pty.previewOnly = FALSE;
12681 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12682 use_iso_exp_pty.value = *ptr;
12683
12684 if(CAM_ISO_PRIORITY == mode) {
12685 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12686 use_iso_exp_pty)) {
12687 rc = BAD_VALUE;
12688 }
12689 }
12690 else {
12691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12692 use_iso_exp_pty)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012696
12697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701 } else {
12702 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12703 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012704 }
12705 }
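    /* Illustrative framework-side usage (assumed, not defined by this HAL):
     * the two vendor tags are expected to arrive together, e.g.
     *   int32_t priority = 0;   // assumed to map to CAM_ISO_PRIORITY
     *   int64_t iso = 800;
     *   settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
     *   settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &iso, 1);
     * which the block above maps onto CAM_INTF_PARM_ISO (or
     * CAM_INTF_PARM_EXPOSURE_TIME) with ZSL mode enabled. */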
12706
12707 // Saturation
12708 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12709 int32_t* use_saturation =
12710 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
Thierry Strudel3d639192016-09-09 11:52:26 -070012716 // EV step
12717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12718 gCamCapability[mCameraId]->exp_compensation_step)) {
12719 rc = BAD_VALUE;
12720 }
12721
12722 // CDS info
12723 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12724 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12725 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12726
12727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12728 CAM_INTF_META_CDS_DATA, *cdsData)) {
12729 rc = BAD_VALUE;
12730 }
12731 }
12732
Shuzhen Wang19463d72016-03-08 11:09:52 -080012733 // Hybrid AE
12734 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12735 uint8_t *hybrid_ae = (uint8_t *)
12736 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12737
12738 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12739 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12740 rc = BAD_VALUE;
12741 }
12742 }
12743
Shuzhen Wang14415f52016-11-16 18:26:18 -080012744 // Histogram
12745 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12746 uint8_t histogramMode =
12747 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12749 histogramMode)) {
12750 rc = BAD_VALUE;
12751 }
12752 }
12753
12754 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12755 int32_t histogramBins =
12756 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12758 histogramBins)) {
12759 rc = BAD_VALUE;
12760 }
12761 }
12762
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012763 // Tracking AF
12764 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12765 uint8_t trackingAfTrigger =
12766 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12767 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12768 trackingAfTrigger)) {
12769 rc = BAD_VALUE;
12770 }
12771 }
12772
Thierry Strudel3d639192016-09-09 11:52:26 -070012773 return rc;
12774}
12775
12776/*===========================================================================
12777 * FUNCTION : captureResultCb
12778 *
12779 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12780 *
12781 * PARAMETERS :
12782 * @frame : frame information from mm-camera-interface
12783 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12784 * @userdata: userdata
12785 *
12786 * RETURN : NONE
12787 *==========================================================================*/
12788void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12789 camera3_stream_buffer_t *buffer,
12790 uint32_t frame_number, bool isInputBuffer, void *userdata)
12791{
12792 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12793 if (hw == NULL) {
12794 LOGE("Invalid hw %p", hw);
12795 return;
12796 }
12797
12798 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12799 return;
12800}
12801
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012802/*===========================================================================
12803 * FUNCTION : setBufferErrorStatus
12804 *
12805 * DESCRIPTION: Callback handler for channels to report any buffer errors
12806 *
12807 * PARAMETERS :
12808 * @ch : Channel on which buffer error is reported from
12809 * @frame_number : frame number on which buffer error is reported on
12810 * @buffer_status : buffer error status
12811 * @userdata: userdata
12812 *
12813 * RETURN : NONE
12814 *==========================================================================*/
12815void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12816 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12817{
12818 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12819 if (hw == NULL) {
12820 LOGE("Invalid hw %p", hw);
12821 return;
12822 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012823
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012824 hw->setBufferErrorStatus(ch, frame_number, err);
12825 return;
12826}
12827
12828void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12829 uint32_t frameNumber, camera3_buffer_status_t err)
12830{
12831 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12832 pthread_mutex_lock(&mMutex);
12833
12834 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12835 if (req.frame_number != frameNumber)
12836 continue;
12837 for (auto& k : req.mPendingBufferList) {
12838 if(k.stream->priv == ch) {
12839 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12840 }
12841 }
12842 }
12843
12844 pthread_mutex_unlock(&mMutex);
12845 return;
12846}
Thierry Strudel3d639192016-09-09 11:52:26 -070012847/*===========================================================================
12848 * FUNCTION : initialize
12849 *
12850 * DESCRIPTION: Pass framework callback pointers to HAL
12851 *
12852 * PARAMETERS :
12853 *
12854 *
12855 * RETURN : Success : 0
12856 * Failure: -ENODEV
12857 *==========================================================================*/
12858
12859int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12860 const camera3_callback_ops_t *callback_ops)
12861{
12862 LOGD("E");
12863 QCamera3HardwareInterface *hw =
12864 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12865 if (!hw) {
12866 LOGE("NULL camera device");
12867 return -ENODEV;
12868 }
12869
12870 int rc = hw->initialize(callback_ops);
12871 LOGD("X");
12872 return rc;
12873}
12874
12875/*===========================================================================
12876 * FUNCTION : configure_streams
12877 *
 12878 * DESCRIPTION: Entry point for stream configuration; forwards the stream
 12879 *              list to configureStreams() on the HAL instance
 12880 * PARAMETERS :
 12881 * @device : camera3 device handle
 12882 * @stream_list : stream configuration requested by the framework
12883 * RETURN : Success: 0
12884 * Failure: -EINVAL (if stream configuration is invalid)
12885 * -ENODEV (fatal error)
12886 *==========================================================================*/
12887
12888int QCamera3HardwareInterface::configure_streams(
12889 const struct camera3_device *device,
12890 camera3_stream_configuration_t *stream_list)
12891{
12892 LOGD("E");
12893 QCamera3HardwareInterface *hw =
12894 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12895 if (!hw) {
12896 LOGE("NULL camera device");
12897 return -ENODEV;
12898 }
12899 int rc = hw->configureStreams(stream_list);
12900 LOGD("X");
12901 return rc;
12902}
12903
12904/*===========================================================================
12905 * FUNCTION : construct_default_request_settings
12906 *
12907 * DESCRIPTION: Configure a settings buffer to meet the required use case
12908 *
12909 * PARAMETERS :
12910 *
12911 *
12912 * RETURN : Success: Return valid metadata
12913 * Failure: Return NULL
12914 *==========================================================================*/
12915const camera_metadata_t* QCamera3HardwareInterface::
12916 construct_default_request_settings(const struct camera3_device *device,
12917 int type)
12918{
12919
12920 LOGD("E");
12921 camera_metadata_t* fwk_metadata = NULL;
12922 QCamera3HardwareInterface *hw =
12923 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12924 if (!hw) {
12925 LOGE("NULL camera device");
12926 return NULL;
12927 }
12928
12929 fwk_metadata = hw->translateCapabilityToMetadata(type);
12930
12931 LOGD("X");
12932 return fwk_metadata;
12933}
12934
12935/*===========================================================================
12936 * FUNCTION : process_capture_request
12937 *
 12938 * DESCRIPTION: Entry point for capture requests; forwards the request to
 12939 *              orchestrateRequest() on the HAL instance
 12940 * PARAMETERS :
 12941 * @device : camera3 device handle
 12942 * @request : capture request from the framework
 12943 * RETURN : 0 on success, negative error code on failure
12944 *==========================================================================*/
12945int QCamera3HardwareInterface::process_capture_request(
12946 const struct camera3_device *device,
12947 camera3_capture_request_t *request)
12948{
12949 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012950 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012951 QCamera3HardwareInterface *hw =
12952 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12953 if (!hw) {
12954 LOGE("NULL camera device");
12955 return -EINVAL;
12956 }
12957
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012958 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012959 LOGD("X");
12960 return rc;
12961}
12962
12963/*===========================================================================
12964 * FUNCTION : dump
12965 *
12966 * DESCRIPTION:
12967 *
12968 * PARAMETERS :
12969 *
12970 *
12971 * RETURN :
12972 *==========================================================================*/
12973
12974void QCamera3HardwareInterface::dump(
12975 const struct camera3_device *device, int fd)
12976{
12977 /* Log level property is read when "adb shell dumpsys media.camera" is
12978 called so that the log level can be controlled without restarting
12979 the media server */
12980 getLogLevel();
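    /* Illustrative only; the property name is an assumption about getLogLevel()
     * and is not verified here. HAL verbosity can typically be raised at
     * runtime with something like:
     *   adb shell setprop persist.camera.hal.debug 4
     *   adb shell dumpsys media.camera
     * so the new level takes effect on this call without restarting the
     * media server. */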
12981
12982 LOGD("E");
12983 QCamera3HardwareInterface *hw =
12984 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12985 if (!hw) {
12986 LOGE("NULL camera device");
12987 return;
12988 }
12989
12990 hw->dump(fd);
12991 LOGD("X");
12992 return;
12993}
12994
12995/*===========================================================================
12996 * FUNCTION : flush
12997 *
 12998 * DESCRIPTION: Entry point to flush all in-flight captures; validates the
 12999 *              HAL state and delegates to flush() with channel restart
 13000 * PARAMETERS :
 13001 * @device : camera3 device handle
 13002 *
 13003 * RETURN : 0 on success, -EINVAL on invalid device, -ENODEV on fatal error
13004 *==========================================================================*/
13005
13006int QCamera3HardwareInterface::flush(
13007 const struct camera3_device *device)
13008{
13009 int rc;
13010 LOGD("E");
13011 QCamera3HardwareInterface *hw =
13012 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13013 if (!hw) {
13014 LOGE("NULL camera device");
13015 return -EINVAL;
13016 }
13017
13018 pthread_mutex_lock(&hw->mMutex);
13019 // Validate current state
13020 switch (hw->mState) {
13021 case STARTED:
13022 /* valid state */
13023 break;
13024
13025 case ERROR:
13026 pthread_mutex_unlock(&hw->mMutex);
13027 hw->handleCameraDeviceError();
13028 return -ENODEV;
13029
13030 default:
13031 LOGI("Flush returned during state %d", hw->mState);
13032 pthread_mutex_unlock(&hw->mMutex);
13033 return 0;
13034 }
13035 pthread_mutex_unlock(&hw->mMutex);
13036
13037 rc = hw->flush(true /* restart channels */ );
13038 LOGD("X");
13039 return rc;
13040}
13041
13042/*===========================================================================
13043 * FUNCTION : close_camera_device
13044 *
13045 * DESCRIPTION:
13046 *
13047 * PARAMETERS :
13048 *
13049 *
13050 * RETURN :
13051 *==========================================================================*/
13052int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13053{
13054 int ret = NO_ERROR;
13055 QCamera3HardwareInterface *hw =
13056 reinterpret_cast<QCamera3HardwareInterface *>(
13057 reinterpret_cast<camera3_device_t *>(device)->priv);
13058 if (!hw) {
13059 LOGE("NULL camera device");
13060 return BAD_VALUE;
13061 }
13062
13063 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13064 delete hw;
13065 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013066 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013067 return ret;
13068}
13069
13070/*===========================================================================
13071 * FUNCTION : getWaveletDenoiseProcessPlate
13072 *
13073 * DESCRIPTION: query wavelet denoise process plate
13074 *
13075 * PARAMETERS : None
13076 *
 13077 * RETURN : WNR process plate value
13078 *==========================================================================*/
13079cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13080{
13081 char prop[PROPERTY_VALUE_MAX];
13082 memset(prop, 0, sizeof(prop));
13083 property_get("persist.denoise.process.plates", prop, "0");
13084 int processPlate = atoi(prop);
13085 switch(processPlate) {
13086 case 0:
13087 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13088 case 1:
13089 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13090 case 2:
13091 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13092 case 3:
13093 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13094 default:
13095 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13096 }
13097}
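/* Usage sketch (illustrative; the parameter id and struct fields mirror the
 * TNR handling earlier in this file and are assumptions when applied to WNR):
 *   cam_denoise_param_t wnr;
 *   wnr.denoise_enable = 1;
 *   wnr.process_plates = getWaveletDenoiseProcessPlate();
 *   ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WAVELET_DENOISE, wnr);
 * The plate itself is selected via the persist.denoise.process.plates property
 * read above (values 0-3); anything else falls back to
 * CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. */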
13098
13099
13100/*===========================================================================
13101 * FUNCTION : getTemporalDenoiseProcessPlate
13102 *
13103 * DESCRIPTION: query temporal denoise process plate
13104 *
13105 * PARAMETERS : None
13106 *
 13107 * RETURN : TNR process plate value
13108 *==========================================================================*/
13109cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13110{
13111 char prop[PROPERTY_VALUE_MAX];
13112 memset(prop, 0, sizeof(prop));
13113 property_get("persist.tnr.process.plates", prop, "0");
13114 int processPlate = atoi(prop);
13115 switch(processPlate) {
13116 case 0:
13117 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13118 case 1:
13119 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13120 case 2:
13121 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13122 case 3:
13123 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13124 default:
13125 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13126 }
13127}
13128
13129
13130/*===========================================================================
13131 * FUNCTION : extractSceneMode
13132 *
13133 * DESCRIPTION: Extract scene mode from frameworks set metadata
13134 *
13135 * PARAMETERS :
13136 * @frame_settings: CameraMetadata reference
 13137 * @metaMode: ANDROID_CONTROL_MODE
13138 * @hal_metadata: hal metadata structure
13139 *
 13140 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13141 *==========================================================================*/
13142int32_t QCamera3HardwareInterface::extractSceneMode(
13143 const CameraMetadata &frame_settings, uint8_t metaMode,
13144 metadata_buffer_t *hal_metadata)
13145{
13146 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013147 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13148
13149 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13150 LOGD("Ignoring control mode OFF_KEEP_STATE");
13151 return NO_ERROR;
13152 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013153
13154 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13155 camera_metadata_ro_entry entry =
13156 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13157 if (0 == entry.count)
13158 return rc;
13159
13160 uint8_t fwk_sceneMode = entry.data.u8[0];
13161
13162 int val = lookupHalName(SCENE_MODES_MAP,
13163 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13164 fwk_sceneMode);
13165 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013166 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013167 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013168 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013169 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013170
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013171 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13172 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13173 }
13174
13175 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13176 if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013177 cam_hdr_param_t hdr_params;
13178 hdr_params.hdr_enable = 1;
13179 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13180 hdr_params.hdr_need_1x = false;
13181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13182 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13183 rc = BAD_VALUE;
13184 }
13185 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013186
Thierry Strudel3d639192016-09-09 11:52:26 -070013187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13188 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13189 rc = BAD_VALUE;
13190 }
13191 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013192
13193 if (mForceHdrSnapshot) {
13194 cam_hdr_param_t hdr_params;
13195 hdr_params.hdr_enable = 1;
13196 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13197 hdr_params.hdr_need_1x = false;
13198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13199 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13200 rc = BAD_VALUE;
13201 }
13202 }
13203
Thierry Strudel3d639192016-09-09 11:52:26 -070013204 return rc;
13205}
13206
13207/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013208 * FUNCTION : setVideoHdrMode
13209 *
13210 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13211 *
13212 * PARAMETERS :
13213 * @hal_metadata: hal metadata structure
13214 * @vhdr: requested video HDR mode (cam_video_hdr_mode_t)
13215 *
13216 * RETURN : int32_t type of status
13217 *==========================================================================*/
13218int32_t QCamera3HardwareInterface::setVideoHdrMode(
13219 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13220{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013221 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13222 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13223 }
13224
13225 LOGE("Invalid Video HDR mode %d!", vhdr);
13226 return BAD_VALUE;
13227}
13228
13229/*===========================================================================
13230 * FUNCTION : setSensorHDR
13231 *
13232 * DESCRIPTION: Enable/disable sensor HDR.
13233 *
13234 * PARAMETERS :
13235 * @hal_metadata: hal metadata structure
13236 * @enable: boolean whether to enable/disable sensor HDR
13237 * @isVideoHdrEnable: true when invoked from the video HDR path
13238 * RETURN : int32_t type of status
13239 *==========================================================================*/
13240int32_t QCamera3HardwareInterface::setSensorHDR(
13241 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13242{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013243 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013244 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13245
13246 if (enable) {
13247 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13248 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13249 #ifdef _LE_CAMERA_
13250 //Default to staggered HDR for IOT
13251 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13252 #else
13253 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13254 #endif
13255 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13256 }
13257
13258 bool isSupported = false;
13259 switch (sensor_hdr) {
13260 case CAM_SENSOR_HDR_IN_SENSOR:
13261 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13262 CAM_QCOM_FEATURE_SENSOR_HDR) {
13263 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013264 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013265 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013266 break;
13267 case CAM_SENSOR_HDR_ZIGZAG:
13268 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13269 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13270 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013271 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013272 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013273 break;
13274 case CAM_SENSOR_HDR_STAGGERED:
13275 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13276 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13277 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013278 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013279 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013280 break;
13281 case CAM_SENSOR_HDR_OFF:
13282 isSupported = true;
13283 LOGD("Turning off sensor HDR");
13284 break;
13285 default:
13286 LOGE("HDR mode %d not supported", sensor_hdr);
13287 rc = BAD_VALUE;
13288 break;
13289 }
13290
13291 if(isSupported) {
13292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13293 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13294 rc = BAD_VALUE;
13295 } else {
13296 if(!isVideoHdrEnable)
13297 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013298 }
13299 }
13300 return rc;
13301}
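/*
 * Illustrative note (not part of the build): setSensorHDR() only consults
 * persist.camera.sensor.hdr when HDR is being enabled, and interprets the
 * value as a cam_sensor_hdr_type_t. Per the _LE_CAMERA_ default above, "3"
 * corresponds to staggered HDR, so an equivalent manual override would be:
 *
 *   adb shell setprop persist.camera.sensor.hdr 3   # assumed: CAM_SENSOR_HDR_STAGGERED
 *
 * The selected type is applied only if the matching capability bit (e.g.
 * CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) is present in
 * qcom_supported_feature_mask; an unrecognized value returns BAD_VALUE.
 */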
13302
13303/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013304 * FUNCTION : needRotationReprocess
13305 *
13306 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13307 *
13308 * PARAMETERS : none
13309 *
13310 * RETURN : true: needed
13311 * false: no need
13312 *==========================================================================*/
13313bool QCamera3HardwareInterface::needRotationReprocess()
13314{
13315 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13316 // pp has the capability to process rotation, so handle rotation via reprocess
13317 LOGH("need do reprocess for rotation");
13318 return true;
13319 }
13320
13321 return false;
13322}
13323
13324/*===========================================================================
13325 * FUNCTION : needReprocess
13326 *
13327 * DESCRIPTION: if reprocess is needed
13328 *
13329 * PARAMETERS : none
13330 *
13331 * RETURN : true: needed
13332 * false: no need
13333 *==========================================================================*/
13334bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13335{
13336 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13337 // TODO: add for ZSL HDR later
13338 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13339 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13340 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13341 return true;
13342 } else {
13343 LOGH("already post processed frame");
13344 return false;
13345 }
13346 }
13347 return needRotationReprocess();
13348}
13349
13350/*===========================================================================
13351 * FUNCTION : needJpegExifRotation
13352 *
13353 * DESCRIPTION: if JPEG EXIF rotation is needed
13354 *
13355 * PARAMETERS : none
13356 *
13357 * RETURN : true: needed
13358 * false: no need
13359 *==========================================================================*/
13360bool QCamera3HardwareInterface::needJpegExifRotation()
13361{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013362 /* If the pp does not have the ability to do rotation, enable JPEG EXIF rotation */
Thierry Strudel3d639192016-09-09 11:52:26 -070013363 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13364 LOGD("Need use Jpeg EXIF Rotation");
13365 return true;
13366 }
13367 return false;
13368}
13369
13370/*===========================================================================
13371 * FUNCTION : addOfflineReprocChannel
13372 *
13373 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13374 * coming from input channel
13375 *
13376 * PARAMETERS :
13377 * @config : reprocess configuration
13378 * @inputChHandle : pointer to the input (source) channel
13379 *
13380 *
13381 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13382 *==========================================================================*/
13383QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13384 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13385{
13386 int32_t rc = NO_ERROR;
13387 QCamera3ReprocessChannel *pChannel = NULL;
13388
13389 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013390 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13391 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013392 if (NULL == pChannel) {
13393 LOGE("no mem for reprocess channel");
13394 return NULL;
13395 }
13396
13397 rc = pChannel->initialize(IS_TYPE_NONE);
13398 if (rc != NO_ERROR) {
13399 LOGE("init reprocess channel failed, ret = %d", rc);
13400 delete pChannel;
13401 return NULL;
13402 }
13403
13404 // pp feature config
13405 cam_pp_feature_config_t pp_config;
13406 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13407
13408 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13409 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13410 & CAM_QCOM_FEATURE_DSDN) {
13411 //Use CPP CDS in case h/w supports it.
13412 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13413 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13414 }
13415 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13416 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13417 }
13418
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013419 if (config.hdr_param.hdr_enable) {
13420 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13421 pp_config.hdr_param = config.hdr_param;
13422 }
13423
13424 if (mForceHdrSnapshot) {
13425 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13426 pp_config.hdr_param.hdr_enable = 1;
13427 pp_config.hdr_param.hdr_need_1x = 0;
13428 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13429 }
13430
Thierry Strudel3d639192016-09-09 11:52:26 -070013431 rc = pChannel->addReprocStreamsFromSource(pp_config,
13432 config,
13433 IS_TYPE_NONE,
13434 mMetadataChannel);
13435
13436 if (rc != NO_ERROR) {
13437 delete pChannel;
13438 return NULL;
13439 }
13440 return pChannel;
13441}
13442
13443/*===========================================================================
13444 * FUNCTION : getMobicatMask
13445 *
13446 * DESCRIPTION: returns mobicat mask
13447 *
13448 * PARAMETERS : none
13449 *
13450 * RETURN : mobicat mask
13451 *
13452 *==========================================================================*/
13453uint8_t QCamera3HardwareInterface::getMobicatMask()
13454{
13455 return m_MobicatMask;
13456}
13457
13458/*===========================================================================
13459 * FUNCTION : setMobicat
13460 *
13461 * DESCRIPTION: set Mobicat on/off.
13462 *
13463 * PARAMETERS :
13464 * None
13465 *
13466 * RETURN : int32_t type of status
13467 * NO_ERROR -- success
13468 * none-zero failure code
13469 *==========================================================================*/
13470int32_t QCamera3HardwareInterface::setMobicat()
13471{
13472 char value [PROPERTY_VALUE_MAX];
13473 property_get("persist.camera.mobicat", value, "0");
13474 int32_t ret = NO_ERROR;
13475 uint8_t enableMobi = (uint8_t)atoi(value);
13476
13477 if (enableMobi) {
13478 tune_cmd_t tune_cmd;
13479 tune_cmd.type = SET_RELOAD_CHROMATIX;
13480 tune_cmd.module = MODULE_ALL;
13481 tune_cmd.value = TRUE;
13482 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13483 CAM_INTF_PARM_SET_VFE_COMMAND,
13484 tune_cmd);
13485
13486 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13487 CAM_INTF_PARM_SET_PP_COMMAND,
13488 tune_cmd);
13489 }
13490 m_MobicatMask = enableMobi;
13491
13492 return ret;
13493}
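/*
 * Illustrative note (not part of the build): Mobicat is gated purely by the
 * persist.camera.mobicat property read above. Any non-zero value queues
 * SET_RELOAD_CHROMATIX tuning commands for both the VFE and the
 * post-processing module and records the value in m_MobicatMask, e.g.:
 *
 *   adb shell setprop persist.camera.mobicat 1
 */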
13494
13495/*===========================================================================
13496* FUNCTION : getLogLevel
13497*
13498* DESCRIPTION: Reads the log level property into a variable
13499*
13500* PARAMETERS :
13501* None
13502*
13503* RETURN :
13504* None
13505*==========================================================================*/
13506void QCamera3HardwareInterface::getLogLevel()
13507{
13508 char prop[PROPERTY_VALUE_MAX];
13509 uint32_t globalLogLevel = 0;
13510
13511 property_get("persist.camera.hal.debug", prop, "0");
13512 int val = atoi(prop);
13513 if (0 <= val) {
13514 gCamHal3LogLevel = (uint32_t)val;
13515 }
13516
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013517 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013518 gKpiDebugLevel = atoi(prop);
13519
13520 property_get("persist.camera.global.debug", prop, "0");
13521 val = atoi(prop);
13522 if (0 <= val) {
13523 globalLogLevel = (uint32_t)val;
13524 }
13525
13526 /* Highest log level among hal.logs and global.logs is selected */
13527 if (gCamHal3LogLevel < globalLogLevel)
13528 gCamHal3LogLevel = globalLogLevel;
13529
13530 return;
13531}
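/*
 * Illustrative note (not part of the build): getLogLevel() takes the higher
 * of persist.camera.hal.debug and persist.camera.global.debug, while
 * gKpiDebugLevel follows persist.camera.kpi.debug directly. With the
 * settings below, gCamHal3LogLevel ends up as 4 and gKpiDebugLevel as 1:
 *
 *   adb shell setprop persist.camera.hal.debug 2
 *   adb shell setprop persist.camera.global.debug 4
 *   adb shell setprop persist.camera.kpi.debug 1
 */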
13532
13533/*===========================================================================
13534 * FUNCTION : validateStreamRotations
13535 *
13536 * DESCRIPTION: Check if the rotations requested are supported
13537 *
13538 * PARAMETERS :
13539 * @stream_list : streams to be configured
13540 *
13541 * RETURN : NO_ERROR on success
13542 * -EINVAL on failure
13543 *
13544 *==========================================================================*/
13545int QCamera3HardwareInterface::validateStreamRotations(
13546 camera3_stream_configuration_t *streamList)
13547{
13548 int rc = NO_ERROR;
13549
13550 /*
13551 * Loop through all streams requested in configuration
13552 * Check if unsupported rotations have been requested on any of them
13553 */
13554 for (size_t j = 0; j < streamList->num_streams; j++){
13555 camera3_stream_t *newStream = streamList->streams[j];
13556
13557 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13558 bool isImplDef = (newStream->format ==
13559 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13560 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13561 isImplDef);
13562
13563 if (isRotated && (!isImplDef || isZsl)) {
13564 LOGE("Error: Unsupported rotation of %d requested for stream"
13565 "type:%d and stream format:%d",
13566 newStream->rotation, newStream->stream_type,
13567 newStream->format);
13568 rc = -EINVAL;
13569 break;
13570 }
13571 }
13572
13573 return rc;
13574}
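/*
 * Illustrative example (hypothetical configuration): a stream list that
 * requests CAMERA3_STREAM_ROTATION_90 on a HAL_PIXEL_FORMAT_BLOB (JPEG)
 * output fails this check, since non-zero rotation is only accepted on
 * IMPLEMENTATION_DEFINED streams that are not ZSL bidirectional streams.
 * The same rotation on an IMPLEMENTATION_DEFINED preview stream passes.
 */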
13575
13576/*===========================================================================
13577* FUNCTION : getFlashInfo
13578*
13579* DESCRIPTION: Retrieve information about whether the device has a flash.
13580*
13581* PARAMETERS :
13582* @cameraId : Camera id to query
13583* @hasFlash : Boolean indicating whether there is a flash device
13584* associated with given camera
13585* @flashNode : If a flash device exists, this will be its device node.
13586*
13587* RETURN :
13588* None
13589*==========================================================================*/
13590void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13591 bool& hasFlash,
13592 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13593{
13594 cam_capability_t* camCapability = gCamCapability[cameraId];
13595 if (NULL == camCapability) {
13596 hasFlash = false;
13597 flashNode[0] = '\0';
13598 } else {
13599 hasFlash = camCapability->flash_available;
13600 strlcpy(flashNode,
13601 (char*)camCapability->flash_dev_name,
13602 QCAMERA_MAX_FILEPATH_LENGTH);
13603 }
13604}
13605
13606/*===========================================================================
13607* FUNCTION : getEepromVersionInfo
13608*
13609* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13610*
13611* PARAMETERS : None
13612*
13613* RETURN : string describing EEPROM version
13614* "\0" if no such info available
13615*==========================================================================*/
13616const char *QCamera3HardwareInterface::getEepromVersionInfo()
13617{
13618 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13619}
13620
13621/*===========================================================================
13622* FUNCTION : getLdafCalib
13623*
13624* DESCRIPTION: Retrieve Laser AF calibration data
13625*
13626* PARAMETERS : None
13627*
13628* RETURN : Two uint32_t describing laser AF calibration data
13629* NULL if none is available.
13630*==========================================================================*/
13631const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13632{
13633 if (mLdafCalibExist) {
13634 return &mLdafCalib[0];
13635 } else {
13636 return NULL;
13637 }
13638}
13639
13640/*===========================================================================
13641 * FUNCTION : dynamicUpdateMetaStreamInfo
13642 *
13643 * DESCRIPTION: This function:
13644 * (1) stops all the channels
13645 * (2) returns error on pending requests and buffers
13646 * (3) sends metastream_info in setparams
13647 * (4) starts all channels
13648 * This is useful when sensor has to be restarted to apply any
13649 * settings such as frame rate from a different sensor mode
13650 *
13651 * PARAMETERS : None
13652 *
13653 * RETURN : NO_ERROR on success
13654 * Error codes on failure
13655 *
13656 *==========================================================================*/
13657int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13658{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013659 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013660 int rc = NO_ERROR;
13661
13662 LOGD("E");
13663
13664 rc = stopAllChannels();
13665 if (rc < 0) {
13666 LOGE("stopAllChannels failed");
13667 return rc;
13668 }
13669
13670 rc = notifyErrorForPendingRequests();
13671 if (rc < 0) {
13672 LOGE("notifyErrorForPendingRequests failed");
13673 return rc;
13674 }
13675
13676 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13677 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13678 "Format:%d",
13679 mStreamConfigInfo.type[i],
13680 mStreamConfigInfo.stream_sizes[i].width,
13681 mStreamConfigInfo.stream_sizes[i].height,
13682 mStreamConfigInfo.postprocess_mask[i],
13683 mStreamConfigInfo.format[i]);
13684 }
13685
13686 /* Send meta stream info once again so that ISP can start */
13687 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13688 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13689 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13690 mParameters);
13691 if (rc < 0) {
13692 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13693 }
13694
13695 rc = startAllChannels();
13696 if (rc < 0) {
13697 LOGE("startAllChannels failed");
13698 return rc;
13699 }
13700
13701 LOGD("X");
13702 return rc;
13703}
13704
13705/*===========================================================================
13706 * FUNCTION : stopAllChannels
13707 *
13708 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13709 *
13710 * PARAMETERS : None
13711 *
13712 * RETURN : NO_ERROR on success
13713 * Error codes on failure
13714 *
13715 *==========================================================================*/
13716int32_t QCamera3HardwareInterface::stopAllChannels()
13717{
13718 int32_t rc = NO_ERROR;
13719
13720 LOGD("Stopping all channels");
13721 // Stop the Streams/Channels
13722 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13723 it != mStreamInfo.end(); it++) {
13724 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13725 if (channel) {
13726 channel->stop();
13727 }
13728 (*it)->status = INVALID;
13729 }
13730
13731 if (mSupportChannel) {
13732 mSupportChannel->stop();
13733 }
13734 if (mAnalysisChannel) {
13735 mAnalysisChannel->stop();
13736 }
13737 if (mRawDumpChannel) {
13738 mRawDumpChannel->stop();
13739 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013740 if (mHdrPlusRawSrcChannel) {
13741 mHdrPlusRawSrcChannel->stop();
13742 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013743 if (mMetadataChannel) {
13744 /* If mStreamInfo is not empty, there is a metadata stream */
13745 mMetadataChannel->stop();
13746 }
13747
13748 LOGD("All channels stopped");
13749 return rc;
13750}
13751
13752/*===========================================================================
13753 * FUNCTION : startAllChannels
13754 *
13755 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13756 *
13757 * PARAMETERS : None
13758 *
13759 * RETURN : NO_ERROR on success
13760 * Error codes on failure
13761 *
13762 *==========================================================================*/
13763int32_t QCamera3HardwareInterface::startAllChannels()
13764{
13765 int32_t rc = NO_ERROR;
13766
13767 LOGD("Start all channels ");
13768 // Start the Streams/Channels
13769 if (mMetadataChannel) {
13770 /* If mStreamInfo is not empty, there is a metadata stream */
13771 rc = mMetadataChannel->start();
13772 if (rc < 0) {
13773 LOGE("META channel start failed");
13774 return rc;
13775 }
13776 }
13777 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13778 it != mStreamInfo.end(); it++) {
13779 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13780 if (channel) {
13781 rc = channel->start();
13782 if (rc < 0) {
13783 LOGE("channel start failed");
13784 return rc;
13785 }
13786 }
13787 }
13788 if (mAnalysisChannel) {
13789 mAnalysisChannel->start();
13790 }
13791 if (mSupportChannel) {
13792 rc = mSupportChannel->start();
13793 if (rc < 0) {
13794 LOGE("Support channel start failed");
13795 return rc;
13796 }
13797 }
13798 if (mRawDumpChannel) {
13799 rc = mRawDumpChannel->start();
13800 if (rc < 0) {
13801 LOGE("RAW dump channel start failed");
13802 return rc;
13803 }
13804 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013805 if (mHdrPlusRawSrcChannel) {
13806 rc = mHdrPlusRawSrcChannel->start();
13807 if (rc < 0) {
13808 LOGE("HDR+ RAW channel start failed");
13809 return rc;
13810 }
13811 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013812
13813 LOGD("All channels started");
13814 return rc;
13815}
13816
13817/*===========================================================================
13818 * FUNCTION : notifyErrorForPendingRequests
13819 *
13820 * DESCRIPTION: This function sends error for all the pending requests/buffers
13821 *
13822 * PARAMETERS : None
13823 *
13824 * RETURN : Error codes
13825 * NO_ERROR on success
13826 *
13827 *==========================================================================*/
13828int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13829{
13830 int32_t rc = NO_ERROR;
13831 unsigned int frameNum = 0;
13832 camera3_capture_result_t result;
13833 camera3_stream_buffer_t *pStream_Buf = NULL;
13834
13835 memset(&result, 0, sizeof(camera3_capture_result_t));
13836
13837 if (mPendingRequestsList.size() > 0) {
13838 pendingRequestIterator i = mPendingRequestsList.begin();
13839 frameNum = i->frame_number;
13840 } else {
13841 /* There might still be pending buffers even though there are
13842 no pending requests. Setting the frameNum to MAX so that
13843 all the buffers with smaller frame numbers are returned */
13844 frameNum = UINT_MAX;
13845 }
13846
13847 LOGH("Oldest frame num on mPendingRequestsList = %u",
13848 frameNum);
13849
Emilian Peev7650c122017-01-19 08:24:33 -080013850 notifyErrorFoPendingDepthData(mDepthChannel);
13851
Thierry Strudel3d639192016-09-09 11:52:26 -070013852 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13853 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13854
13855 if (req->frame_number < frameNum) {
13856 // Send Error notify to frameworks for each buffer for which
13857 // metadata buffer is already sent
13858 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13859 req->frame_number, req->mPendingBufferList.size());
13860
13861 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13862 if (NULL == pStream_Buf) {
13863 LOGE("No memory for pending buffers array");
13864 return NO_MEMORY;
13865 }
13866 memset(pStream_Buf, 0,
13867 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13868 result.result = NULL;
13869 result.frame_number = req->frame_number;
13870 result.num_output_buffers = req->mPendingBufferList.size();
13871 result.output_buffers = pStream_Buf;
13872
13873 size_t index = 0;
13874 for (auto info = req->mPendingBufferList.begin();
13875 info != req->mPendingBufferList.end(); ) {
13876
13877 camera3_notify_msg_t notify_msg;
13878 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13879 notify_msg.type = CAMERA3_MSG_ERROR;
13880 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13881 notify_msg.message.error.error_stream = info->stream;
13882 notify_msg.message.error.frame_number = req->frame_number;
13883 pStream_Buf[index].acquire_fence = -1;
13884 pStream_Buf[index].release_fence = -1;
13885 pStream_Buf[index].buffer = info->buffer;
13886 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13887 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013888 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013889 index++;
13890 // Remove buffer from list
13891 info = req->mPendingBufferList.erase(info);
13892 }
13893
13894 // Remove this request from Map
13895 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13896 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13897 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13898
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013899 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013900
13901 delete [] pStream_Buf;
13902 } else {
13903
13904 // Go through the pending requests info and send error request to framework
13905 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13906
13907 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13908
13909 // Send error notify to frameworks
13910 camera3_notify_msg_t notify_msg;
13911 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13912 notify_msg.type = CAMERA3_MSG_ERROR;
13913 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13914 notify_msg.message.error.error_stream = NULL;
13915 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013916 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013917
13918 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13919 if (NULL == pStream_Buf) {
13920 LOGE("No memory for pending buffers array");
13921 return NO_MEMORY;
13922 }
13923 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13924
13925 result.result = NULL;
13926 result.frame_number = req->frame_number;
13927 result.input_buffer = i->input_buffer;
13928 result.num_output_buffers = req->mPendingBufferList.size();
13929 result.output_buffers = pStream_Buf;
13930
13931 size_t index = 0;
13932 for (auto info = req->mPendingBufferList.begin();
13933 info != req->mPendingBufferList.end(); ) {
13934 pStream_Buf[index].acquire_fence = -1;
13935 pStream_Buf[index].release_fence = -1;
13936 pStream_Buf[index].buffer = info->buffer;
13937 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13938 pStream_Buf[index].stream = info->stream;
13939 index++;
13940 // Remove buffer from list
13941 info = req->mPendingBufferList.erase(info);
13942 }
13943
13944 // Remove this request from Map
13945 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13946 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13947 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13948
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013949 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013950 delete [] pStream_Buf;
13951 i = erasePendingRequest(i);
13952 }
13953 }
13954
13955 /* Reset pending frame Drop list and requests list */
13956 mPendingFrameDropList.clear();
13957
13958 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13959 req.mPendingBufferList.clear();
13960 }
13961 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013962 LOGH("Cleared all the pending buffers ");
13963
13964 return rc;
13965}
13966
13967bool QCamera3HardwareInterface::isOnEncoder(
13968 const cam_dimension_t max_viewfinder_size,
13969 uint32_t width, uint32_t height)
13970{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013971 return ((width > (uint32_t)max_viewfinder_size.width) ||
13972 (height > (uint32_t)max_viewfinder_size.height) ||
13973 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13974 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013975}
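/*
 * Illustrative example (assumed viewfinder size): with a
 * max_viewfinder_size of 1920x1080, a 3840x2160 output is treated as being
 * on the encoder path while a 1280x720 output is not. Anything wider or
 * taller than 4K (3840x2160) lands on the encoder path regardless of the
 * advertised viewfinder size.
 */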
13976
13977/*===========================================================================
13978 * FUNCTION : setBundleInfo
13979 *
13980 * DESCRIPTION: Set bundle info for all streams that are bundle.
13981 *
13982 * PARAMETERS : None
13983 *
13984 * RETURN : NO_ERROR on success
13985 * Error codes on failure
13986 *==========================================================================*/
13987int32_t QCamera3HardwareInterface::setBundleInfo()
13988{
13989 int32_t rc = NO_ERROR;
13990
13991 if (mChannelHandle) {
13992 cam_bundle_config_t bundleInfo;
13993 memset(&bundleInfo, 0, sizeof(bundleInfo));
13994 rc = mCameraHandle->ops->get_bundle_info(
13995 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13996 if (rc != NO_ERROR) {
13997 LOGE("get_bundle_info failed");
13998 return rc;
13999 }
14000 if (mAnalysisChannel) {
14001 mAnalysisChannel->setBundleInfo(bundleInfo);
14002 }
14003 if (mSupportChannel) {
14004 mSupportChannel->setBundleInfo(bundleInfo);
14005 }
14006 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14007 it != mStreamInfo.end(); it++) {
14008 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14009 channel->setBundleInfo(bundleInfo);
14010 }
14011 if (mRawDumpChannel) {
14012 mRawDumpChannel->setBundleInfo(bundleInfo);
14013 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014014 if (mHdrPlusRawSrcChannel) {
14015 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14016 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014017 }
14018
14019 return rc;
14020}
14021
14022/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014023 * FUNCTION : setInstantAEC
14024 *
14025 * DESCRIPTION: Set Instant AEC related params.
14026 *
14027 * PARAMETERS :
14028 * @meta: CameraMetadata reference
14029 *
14030 * RETURN : NO_ERROR on success
14031 * Error codes on failure
14032 *==========================================================================*/
14033int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14034{
14035 int32_t rc = NO_ERROR;
14036 uint8_t val = 0;
14037 char prop[PROPERTY_VALUE_MAX];
14038
14039 // First try to configure instant AEC from framework metadata
14040 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14041 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14042 }
14043
14044 // If framework did not set this value, try to read from set prop.
14045 if (val == 0) {
14046 memset(prop, 0, sizeof(prop));
14047 property_get("persist.camera.instant.aec", prop, "0");
14048 val = (uint8_t)atoi(prop);
14049 }
14050
14051 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14052 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14053 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14054 mInstantAEC = val;
14055 mInstantAECSettledFrameNumber = 0;
14056 mInstantAecFrameIdxCount = 0;
14057 LOGH("instantAEC value set %d",val);
14058 if (mInstantAEC) {
14059 memset(prop, 0, sizeof(prop));
14060 property_get("persist.camera.ae.instant.bound", prop, "10");
14061 int32_t aec_frame_skip_cnt = atoi(prop);
14062 if (aec_frame_skip_cnt >= 0) {
14063 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14064 } else {
14065 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14066 rc = BAD_VALUE;
14067 }
14068 }
14069 } else {
14070 LOGE("Bad instant aec value set %d", val);
14071 rc = BAD_VALUE;
14072 }
14073 return rc;
14074}
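/*
 * Illustrative note (not part of the build): when the framework does not
 * send QCAMERA3_INSTANT_AEC_MODE, instant AEC can still be exercised through
 * properties. persist.camera.ae.instant.bound appears to cap how many
 * display frames are skipped while AEC settles (default 10):
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10
 */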
14075
14076/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014077 * FUNCTION : get_num_overall_buffers
14078 *
14079 * DESCRIPTION: Estimate number of pending buffers across all requests.
14080 *
14081 * PARAMETERS : None
14082 *
14083 * RETURN : Number of overall pending buffers
14084 *
14085 *==========================================================================*/
14086uint32_t PendingBuffersMap::get_num_overall_buffers()
14087{
14088 uint32_t sum_buffers = 0;
14089 for (auto &req : mPendingBuffersInRequest) {
14090 sum_buffers += req.mPendingBufferList.size();
14091 }
14092 return sum_buffers;
14093}
14094
14095/*===========================================================================
14096 * FUNCTION : removeBuf
14097 *
14098 * DESCRIPTION: Remove a matching buffer from tracker.
14099 *
14100 * PARAMETERS : @buffer: image buffer for the callback
14101 *
14102 * RETURN : None
14103 *
14104 *==========================================================================*/
14105void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14106{
14107 bool buffer_found = false;
14108 for (auto req = mPendingBuffersInRequest.begin();
14109 req != mPendingBuffersInRequest.end(); req++) {
14110 for (auto k = req->mPendingBufferList.begin();
14111 k != req->mPendingBufferList.end(); k++ ) {
14112 if (k->buffer == buffer) {
14113 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14114 req->frame_number, buffer);
14115 k = req->mPendingBufferList.erase(k);
14116 if (req->mPendingBufferList.empty()) {
14117 // Remove this request from Map
14118 req = mPendingBuffersInRequest.erase(req);
14119 }
14120 buffer_found = true;
14121 break;
14122 }
14123 }
14124 if (buffer_found) {
14125 break;
14126 }
14127 }
14128 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14129 get_num_overall_buffers());
14130}
14131
14132/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014133 * FUNCTION : getBufErrStatus
14134 *
14135 * DESCRIPTION: get buffer error status
14136 *
14137 * PARAMETERS : @buffer: buffer handle
14138 *
14139 * RETURN : Error status
14140 *
14141 *==========================================================================*/
14142int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14143{
14144 for (auto& req : mPendingBuffersInRequest) {
14145 for (auto& k : req.mPendingBufferList) {
14146 if (k.buffer == buffer)
14147 return k.bufStatus;
14148 }
14149 }
14150 return CAMERA3_BUFFER_STATUS_OK;
14151}
14152
14153/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014154 * FUNCTION : setPAAFSupport
14155 *
14156 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14157 * feature mask according to stream type and filter
14158 * arrangement
14159 *
14160 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14161 * @stream_type: stream type
14162 * @filter_arrangement: filter arrangement
14163 *
14164 * RETURN : None
14165 *==========================================================================*/
14166void QCamera3HardwareInterface::setPAAFSupport(
14167 cam_feature_mask_t& feature_mask,
14168 cam_stream_type_t stream_type,
14169 cam_color_filter_arrangement_t filter_arrangement)
14170{
Thierry Strudel3d639192016-09-09 11:52:26 -070014171 switch (filter_arrangement) {
14172 case CAM_FILTER_ARRANGEMENT_RGGB:
14173 case CAM_FILTER_ARRANGEMENT_GRBG:
14174 case CAM_FILTER_ARRANGEMENT_GBRG:
14175 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014176 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14177 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014178 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014179 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14180 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014181 }
14182 break;
14183 case CAM_FILTER_ARRANGEMENT_Y:
14184 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14185 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14186 }
14187 break;
14188 default:
14189 break;
14190 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014191 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14192 feature_mask, stream_type, filter_arrangement);
14193
14194
Thierry Strudel3d639192016-09-09 11:52:26 -070014195}
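/*
 * Illustrative example: for a Bayer sensor (RGGB/GRBG/GBRG/BGGR filter
 * arrangement) the PAAF bit is set on preview, analysis and video streams
 * unless CAM_QTI_FEATURE_PPEISCORE is already in the mask; for a mono
 * (CAM_FILTER_ARRANGEMENT_Y) sensor it is set only on the analysis stream.
 */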
14196
14197/*===========================================================================
14198* FUNCTION : getSensorMountAngle
14199*
14200* DESCRIPTION: Retrieve sensor mount angle
14201*
14202* PARAMETERS : None
14203*
14204* RETURN : sensor mount angle in uint32_t
14205*==========================================================================*/
14206uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14207{
14208 return gCamCapability[mCameraId]->sensor_mount_angle;
14209}
14210
14211/*===========================================================================
14212* FUNCTION : getRelatedCalibrationData
14213*
14214* DESCRIPTION: Retrieve related system calibration data
14215*
14216* PARAMETERS : None
14217*
14218* RETURN : Pointer of related system calibration data
14219*==========================================================================*/
14220const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14221{
14222 return (const cam_related_system_calibration_data_t *)
14223 &(gCamCapability[mCameraId]->related_cam_calibration);
14224}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014225
14226/*===========================================================================
14227 * FUNCTION : is60HzZone
14228 *
14229 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14230 *
14231 * PARAMETERS : None
14232 *
14233 * RETURN : True if in 60Hz zone, False otherwise
14234 *==========================================================================*/
14235bool QCamera3HardwareInterface::is60HzZone()
14236{
14237 time_t t = time(NULL);
14238 struct tm lt;
14239
14240 struct tm* r = localtime_r(&t, &lt);
14241
14242 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14243 return true;
14244 else
14245 return false;
14246}
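/*
 * Illustrative example: the check treats any UTC offset at or below -2h, or
 * at or above +8h, as a 60Hz region. A device in New York (UTC-5,
 * tm_gmtoff = -18000) returns true, while one in Berlin (UTC+1,
 * tm_gmtoff = 3600) returns false and is treated as a 50Hz zone. A failed
 * localtime_r() also defaults to 60Hz.
 */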
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014247
14248/*===========================================================================
14249 * FUNCTION : adjustBlackLevelForCFA
14250 *
14251 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14252 * of bayer CFA (Color Filter Array).
14253 *
14254 * PARAMETERS : @input: black level pattern in the order of RGGB
14255 * @output: black level pattern in the order of CFA
14256 * @color_arrangement: CFA color arrangement
14257 *
14258 * RETURN : None
14259 *==========================================================================*/
14260template<typename T>
14261void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14262 T input[BLACK_LEVEL_PATTERN_CNT],
14263 T output[BLACK_LEVEL_PATTERN_CNT],
14264 cam_color_filter_arrangement_t color_arrangement)
14265{
14266 switch (color_arrangement) {
14267 case CAM_FILTER_ARRANGEMENT_GRBG:
14268 output[0] = input[1];
14269 output[1] = input[0];
14270 output[2] = input[3];
14271 output[3] = input[2];
14272 break;
14273 case CAM_FILTER_ARRANGEMENT_GBRG:
14274 output[0] = input[2];
14275 output[1] = input[3];
14276 output[2] = input[0];
14277 output[3] = input[1];
14278 break;
14279 case CAM_FILTER_ARRANGEMENT_BGGR:
14280 output[0] = input[3];
14281 output[1] = input[2];
14282 output[2] = input[1];
14283 output[3] = input[0];
14284 break;
14285 case CAM_FILTER_ARRANGEMENT_RGGB:
14286 output[0] = input[0];
14287 output[1] = input[1];
14288 output[2] = input[2];
14289 output[3] = input[3];
14290 break;
14291 default:
14292 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14293 break;
14294 }
14295}
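/*
 * Illustrative example: with the input given in RGGB order,
 * e.g. {R, Gr, Gb, B} = {64, 65, 66, 67}, the GRBG case above reorders it
 * to {Gr, R, B, Gb} = {65, 64, 67, 66}, so each output slot holds the black
 * level of the color that the CFA places at that position. The RGGB case is
 * a pass-through.
 */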
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014296
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014297void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14298 CameraMetadata &resultMetadata,
14299 std::shared_ptr<metadata_buffer_t> settings)
14300{
14301 if (settings == nullptr) {
14302 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14303 return;
14304 }
14305
14306 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14307 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14308 }
14309
14310 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14311 String8 str((const char *)gps_methods);
14312 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14313 }
14314
14315 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14316 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14317 }
14318
14319 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14320 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14321 }
14322
14323 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14324 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14325 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14326 }
14327
14328 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14329 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14330 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14331 }
14332
14333 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14334 int32_t fwk_thumb_size[2];
14335 fwk_thumb_size[0] = thumb_size->width;
14336 fwk_thumb_size[1] = thumb_size->height;
14337 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14338 }
14339
14340 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14341 uint8_t fwk_intent = intent[0];
14342 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14343 }
14344}
14345
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014346bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14347 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14348 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014349{
14350 if (hdrPlusRequest == nullptr) return false;
14351
14352 // Check noise reduction mode is high quality.
14353 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14354 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14355 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014356 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14357 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014358 return false;
14359 }
14360
14361 // Check edge mode is high quality.
14362 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14363 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14364 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14365 return false;
14366 }
14367
14368 if (request.num_output_buffers != 1 ||
14369 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14370 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014371 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14372 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14373 request.output_buffers[i].stream->width,
14374 request.output_buffers[i].stream->height,
14375 request.output_buffers[i].stream->format);
14376 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014377 return false;
14378 }
14379
14380 // Get a YUV buffer from pic channel.
14381 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14382 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14383 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14384 if (res != OK) {
14385 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14386 __FUNCTION__, strerror(-res), res);
14387 return false;
14388 }
14389
14390 pbcamera::StreamBuffer buffer;
14391 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014392 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014393 buffer.data = yuvBuffer->buffer;
14394 buffer.dataSize = yuvBuffer->frame_len;
14395
14396 pbcamera::CaptureRequest pbRequest;
14397 pbRequest.id = request.frame_number;
14398 pbRequest.outputBuffers.push_back(buffer);
14399
14400 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014401 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014402 if (res != OK) {
14403 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14404 strerror(-res), res);
14405 return false;
14406 }
14407
14408 hdrPlusRequest->yuvBuffer = yuvBuffer;
14409 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14410
14411 return true;
14412}
14413
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014414status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14415 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14416 return OK;
14417 }
14418
14419 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14420 if (res != OK) {
14421 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14422 strerror(-res), res);
14423 return res;
14424 }
14425 gHdrPlusClientOpening = true;
14426
14427 return OK;
14428}
14429
Chien-Yu Chenee335912017-02-09 17:53:20 -080014430status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14431{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014432 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014433
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014434 // Check if gHdrPlusClient is opened or being opened.
14435 if (gHdrPlusClient == nullptr) {
14436 if (gHdrPlusClientOpening) {
14437 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14438 return OK;
14439 }
14440
14441 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014442 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014443 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14444 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014445 return res;
14446 }
14447
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014448 // When opening HDR+ client completes, HDR+ mode will be enabled.
14449 return OK;
14450
Chien-Yu Chenee335912017-02-09 17:53:20 -080014451 }
14452
14453 // Configure stream for HDR+.
14454 res = configureHdrPlusStreamsLocked();
14455 if (res != OK) {
14456 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014457 return res;
14458 }
14459
14460 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14461 res = gHdrPlusClient->setZslHdrPlusMode(true);
14462 if (res != OK) {
14463 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014464 return res;
14465 }
14466
14467 mHdrPlusModeEnabled = true;
14468 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14469
14470 return OK;
14471}
14472
14473void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14474{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014475 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014476 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014477 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14478 if (res != OK) {
14479 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14480 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014481
14482 // Close HDR+ client so Easel can enter low power mode.
14483 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14484 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014485 }
14486
14487 mHdrPlusModeEnabled = false;
14488 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14489}
14490
14491status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014492{
14493 pbcamera::InputConfiguration inputConfig;
14494 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14495 status_t res = OK;
14496
14497 // Configure HDR+ client streams.
14498 // Get input config.
14499 if (mHdrPlusRawSrcChannel) {
14500 // HDR+ input buffers will be provided by HAL.
14501 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14502 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14503 if (res != OK) {
14504 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14505 __FUNCTION__, strerror(-res), res);
14506 return res;
14507 }
14508
14509 inputConfig.isSensorInput = false;
14510 } else {
14511 // Sensor MIPI will send data to Easel.
14512 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014513 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014514 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14515 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14516 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14517 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14518 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14519 if (mSensorModeInfo.num_raw_bits != 10) {
14520 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14521 mSensorModeInfo.num_raw_bits);
14522 return BAD_VALUE;
14523 }
14524
14525 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014526 }
14527
14528 // Get output configurations.
14529 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014530 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014531
14532 // Easel may need to output YUV output buffers if mPictureChannel was created.
14533 pbcamera::StreamConfiguration yuvOutputConfig;
14534 if (mPictureChannel != nullptr) {
14535 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14536 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14537 if (res != OK) {
14538 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14539 __FUNCTION__, strerror(-res), res);
14540
14541 return res;
14542 }
14543
14544 outputStreamConfigs.push_back(yuvOutputConfig);
14545 }
14546
14547 // TODO: consider other channels for YUV output buffers.
14548
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014549 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014550 if (res != OK) {
14551 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14552 strerror(-res), res);
14553 return res;
14554 }
14555
14556 return OK;
14557}
14558
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014559void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14560 if (client == nullptr) {
14561 ALOGE("%s: Opened client is null.", __FUNCTION__);
14562 return;
14563 }
14564
14565 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14566
14567 Mutex::Autolock l(gHdrPlusClientLock);
14568 gHdrPlusClient = std::move(client);
14569 gHdrPlusClientOpening = false;
14570
14571 // Set static metadata.
14572 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14573 if (res != OK) {
14574 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14575 __FUNCTION__, strerror(-res), res);
14576 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14577 gHdrPlusClient = nullptr;
14578 return;
14579 }
14580
14581 // Enable HDR+ mode.
14582 res = enableHdrPlusModeLocked();
14583 if (res != OK) {
14584 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14585 }
14586}
14587
14588void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14589 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14590 Mutex::Autolock l(gHdrPlusClientLock);
14591 gHdrPlusClientOpening = false;
14592}
14593
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014594void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14595 const camera_metadata_t &resultMetadata) {
14596 if (result != nullptr) {
14597 if (result->outputBuffers.size() != 1) {
14598 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14599 result->outputBuffers.size());
14600 return;
14601 }
14602
14603 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14604 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14605 result->outputBuffers[0].streamId);
14606 return;
14607 }
14608
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014609 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014610 HdrPlusPendingRequest pendingRequest;
14611 {
14612 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14613 auto req = mHdrPlusPendingRequests.find(result->requestId);
14614 pendingRequest = req->second;
14615 }
14616
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014617 // Update the result metadata with the settings of the HDR+ still capture request because
14618 // the result metadata belongs to a ZSL buffer.
14619 CameraMetadata metadata;
14620 metadata = &resultMetadata;
14621 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14622 camera_metadata_t* updatedResultMetadata = metadata.release();
14623
14624 QCamera3PicChannel *picChannel =
14625 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14626
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014627 // Check if dumping HDR+ YUV output is enabled.
14628 char prop[PROPERTY_VALUE_MAX];
14629 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14630 bool dumpYuvOutput = atoi(prop);
14631
14632 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014633 // Dump yuv buffer to a ppm file.
14634 pbcamera::StreamConfiguration outputConfig;
14635 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14636 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14637 if (rc == OK) {
14638 char buf[FILENAME_MAX] = {};
14639 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14640 result->requestId, result->outputBuffers[0].streamId,
14641 outputConfig.image.width, outputConfig.image.height);
14642
14643 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14644 } else {
14645 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14646 __FUNCTION__, strerror(-rc), rc);
14647 }
14648 }
14649
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014650 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14651 auto halMetadata = std::make_shared<metadata_buffer_t>();
14652 clear_metadata_buffer(halMetadata.get());
14653
14654 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14655 // encoding.
14656 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14657 halStreamId, /*minFrameDuration*/0);
14658 if (res == OK) {
14659 // Return the buffer to pic channel for encoding.
14660 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14661 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14662 halMetadata);
14663 } else {
14664 // Return the buffer without encoding.
14665 // TODO: This should not happen but we may want to report an error buffer to camera
14666 // service.
14667 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14668 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14669 strerror(-res), res);
14670 }
14671
14672 // Send HDR+ metadata to framework.
14673 {
14674 pthread_mutex_lock(&mMutex);
14675
14676 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14677 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14678 pthread_mutex_unlock(&mMutex);
14679 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014680
14681 // Remove the HDR+ pending request.
14682 {
14683 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14684 auto req = mHdrPlusPendingRequests.find(result->requestId);
14685 mHdrPlusPendingRequests.erase(req);
14686 }
14687 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014688}
14689
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014690void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14691 // TODO: Handle HDR+ capture failures and send the failure to framework.
14692 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14693 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14694
14695 // Return the buffer to pic channel.
14696 QCamera3PicChannel *picChannel =
14697 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14698 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14699
14700 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014701}
14702
Thierry Strudel3d639192016-09-09 11:52:26 -070014703}; //end namespace qcamera