blob: 5e345cedae1e05bbf3b75f172979e5a013761e8d [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070061#include "EaselManagerClient.h"
Chien-Yu Chene687bd02016-12-07 18:30:26 -080062
Thierry Strudel3d639192016-09-09 11:52:26 -070063extern "C" {
64#include "mm_camera_dbg.h"
65}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080066#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070067
68using namespace android;
69
70namespace qcamera {
71
72#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
73
74#define EMPTY_PIPELINE_DELAY 2
75#define PARTIAL_RESULT_COUNT 2
76#define FRAME_SKIP_DELAY 0
77
78#define MAX_VALUE_8BIT ((1<<8)-1)
79#define MAX_VALUE_10BIT ((1<<10)-1)
80#define MAX_VALUE_12BIT ((1<<12)-1)
81
82#define VIDEO_4K_WIDTH 3840
83#define VIDEO_4K_HEIGHT 2160
84
Jason Leeb9e76432017-03-10 17:14:19 -080085#define MAX_EIS_WIDTH 3840
86#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070087
88#define MAX_RAW_STREAMS 1
89#define MAX_STALLING_STREAMS 1
90#define MAX_PROCESSED_STREAMS 3
91/* Batch mode is enabled only if FPS set is equal to or greater than this */
92#define MIN_FPS_FOR_BATCH_MODE (120)
93#define PREVIEW_FPS_FOR_HFR (30)
94#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080095#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070096#define MAX_HFR_BATCH_SIZE (8)
97#define REGIONS_TUPLE_COUNT 5
98#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070099// Set a threshold for detection of missing buffers //seconds
100#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800101#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700102#define FLUSH_TIMEOUT 3
103#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
104
105#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
106 CAM_QCOM_FEATURE_CROP |\
107 CAM_QCOM_FEATURE_ROTATION |\
108 CAM_QCOM_FEATURE_SHARPNESS |\
109 CAM_QCOM_FEATURE_SCALE |\
110 CAM_QCOM_FEATURE_CAC |\
111 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700112/* Per configuration size for static metadata length*/
113#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700114
115#define TIMEOUT_NEVER -1
116
Jason Lee8ce36fa2017-04-19 19:40:37 -0700117/* Face rect indices */
118#define FACE_LEFT 0
119#define FACE_TOP 1
120#define FACE_RIGHT 2
121#define FACE_BOTTOM 3
122#define FACE_WEIGHT 4
123
Thierry Strudel04e026f2016-10-10 11:27:36 -0700124/* Face landmarks indices */
125#define LEFT_EYE_X 0
126#define LEFT_EYE_Y 1
127#define RIGHT_EYE_X 2
128#define RIGHT_EYE_Y 3
129#define MOUTH_X 4
130#define MOUTH_Y 5
131#define TOTAL_LANDMARK_INDICES 6
132
Zhijun He2a5df222017-04-04 18:20:38 -0700133// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700134#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700135
Thierry Strudel3d639192016-09-09 11:52:26 -0700136cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
137const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
138extern pthread_mutex_t gCamLock;
139volatile uint32_t gCamHal3LogLevel = 1;
140extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700141
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800142// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700143// The following Easel related variables must be protected by gHdrPlusClientLock.
144EaselManagerClient gEaselManagerClient;
145bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
146std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
147bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700148bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700149bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700150
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800151// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
152bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700153
154Mutex gHdrPlusClientLock; // Protect above Easel related variables.
155
Thierry Strudel3d639192016-09-09 11:52:26 -0700156
157const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
158 {"On", CAM_CDS_MODE_ON},
159 {"Off", CAM_CDS_MODE_OFF},
160 {"Auto",CAM_CDS_MODE_AUTO}
161};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700162const QCamera3HardwareInterface::QCameraMap<
163 camera_metadata_enum_android_video_hdr_mode_t,
164 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
165 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
166 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
167};
168
Thierry Strudel54dc9782017-02-15 12:12:10 -0800169const QCamera3HardwareInterface::QCameraMap<
170 camera_metadata_enum_android_binning_correction_mode_t,
171 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
172 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
173 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
174};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700175
176const QCamera3HardwareInterface::QCameraMap<
177 camera_metadata_enum_android_ir_mode_t,
178 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
179 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
180 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
181 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
182};
Thierry Strudel3d639192016-09-09 11:52:26 -0700183
184const QCamera3HardwareInterface::QCameraMap<
185 camera_metadata_enum_android_control_effect_mode_t,
186 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
187 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
188 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
189 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
190 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
191 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
192 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
193 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
194 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
195 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
196};
197
198const QCamera3HardwareInterface::QCameraMap<
199 camera_metadata_enum_android_control_awb_mode_t,
200 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
201 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
202 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
203 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
204 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
205 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
206 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
207 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
208 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
209 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
210};
211
212const QCamera3HardwareInterface::QCameraMap<
213 camera_metadata_enum_android_control_scene_mode_t,
214 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
215 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
216 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
217 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
218 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
219 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
220 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
221 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
222 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
223 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
224 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
225 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
226 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
227 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
228 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
229 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800230 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
231 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700232};
233
234const QCamera3HardwareInterface::QCameraMap<
235 camera_metadata_enum_android_control_af_mode_t,
236 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
237 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
238 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
239 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
240 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
241 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
242 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
243 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
244};
245
246const QCamera3HardwareInterface::QCameraMap<
247 camera_metadata_enum_android_color_correction_aberration_mode_t,
248 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
249 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
250 CAM_COLOR_CORRECTION_ABERRATION_OFF },
251 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
252 CAM_COLOR_CORRECTION_ABERRATION_FAST },
253 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
254 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
255};
256
257const QCamera3HardwareInterface::QCameraMap<
258 camera_metadata_enum_android_control_ae_antibanding_mode_t,
259 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
260 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
261 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
262 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
263 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
264};
265
266const QCamera3HardwareInterface::QCameraMap<
267 camera_metadata_enum_android_control_ae_mode_t,
268 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
269 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
270 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
271 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
272 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
273 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
274};
275
276const QCamera3HardwareInterface::QCameraMap<
277 camera_metadata_enum_android_flash_mode_t,
278 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
279 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
280 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
281 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
282};
283
284const QCamera3HardwareInterface::QCameraMap<
285 camera_metadata_enum_android_statistics_face_detect_mode_t,
286 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
287 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
288 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
289 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
290};
291
292const QCamera3HardwareInterface::QCameraMap<
293 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
294 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
295 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
296 CAM_FOCUS_UNCALIBRATED },
297 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
298 CAM_FOCUS_APPROXIMATE },
299 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
300 CAM_FOCUS_CALIBRATED }
301};
302
303const QCamera3HardwareInterface::QCameraMap<
304 camera_metadata_enum_android_lens_state_t,
305 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
306 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
307 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
308};
309
310const int32_t available_thumbnail_sizes[] = {0, 0,
311 176, 144,
312 240, 144,
313 256, 144,
314 240, 160,
315 256, 154,
316 240, 240,
317 320, 240};
318
319const QCamera3HardwareInterface::QCameraMap<
320 camera_metadata_enum_android_sensor_test_pattern_mode_t,
321 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
322 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
323 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
324 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
325 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
326 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
327 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
328};
329
330/* Since there is no mapping for all the options some Android enum are not listed.
331 * Also, the order in this list is important because while mapping from HAL to Android it will
332 * traverse from lower to higher index which means that for HAL values that are map to different
333 * Android values, the traverse logic will select the first one found.
334 */
335const QCamera3HardwareInterface::QCameraMap<
336 camera_metadata_enum_android_sensor_reference_illuminant1_t,
337 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
338 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
339 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
340 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
341 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
342 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
343 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
344 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
354};
355
356const QCamera3HardwareInterface::QCameraMap<
357 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
358 { 60, CAM_HFR_MODE_60FPS},
359 { 90, CAM_HFR_MODE_90FPS},
360 { 120, CAM_HFR_MODE_120FPS},
361 { 150, CAM_HFR_MODE_150FPS},
362 { 180, CAM_HFR_MODE_180FPS},
363 { 210, CAM_HFR_MODE_210FPS},
364 { 240, CAM_HFR_MODE_240FPS},
365 { 480, CAM_HFR_MODE_480FPS},
366};
367
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700368const QCamera3HardwareInterface::QCameraMap<
369 qcamera3_ext_instant_aec_mode_t,
370 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
371 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
372 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
373 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
374};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800375
376const QCamera3HardwareInterface::QCameraMap<
377 qcamera3_ext_exposure_meter_mode_t,
378 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
379 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
380 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
381 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
382 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
383 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
384 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
385 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
386};
387
388const QCamera3HardwareInterface::QCameraMap<
389 qcamera3_ext_iso_mode_t,
390 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
391 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
392 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
393 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
394 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
395 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
396 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
397 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
398 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
399};
400
Thierry Strudel3d639192016-09-09 11:52:26 -0700401camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
402 .initialize = QCamera3HardwareInterface::initialize,
403 .configure_streams = QCamera3HardwareInterface::configure_streams,
404 .register_stream_buffers = NULL,
405 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
406 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
407 .get_metadata_vendor_tag_ops = NULL,
408 .dump = QCamera3HardwareInterface::dump,
409 .flush = QCamera3HardwareInterface::flush,
410 .reserved = {0},
411};
412
413// initialise to some default value
414uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
415
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700416static inline void logEaselEvent(const char *tag, const char *event) {
417 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
418 struct timespec ts = {};
419 static int64_t kMsPerSec = 1000;
420 static int64_t kNsPerMs = 1000000;
421 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
422 if (res != OK) {
423 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
424 } else {
425 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
426 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
427 }
428 }
429}
430
Thierry Strudel3d639192016-09-09 11:52:26 -0700431/*===========================================================================
432 * FUNCTION : QCamera3HardwareInterface
433 *
434 * DESCRIPTION: constructor of QCamera3HardwareInterface
435 *
436 * PARAMETERS :
437 * @cameraId : camera ID
438 *
439 * RETURN : none
440 *==========================================================================*/
441QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
442 const camera_module_callbacks_t *callbacks)
443 : mCameraId(cameraId),
444 mCameraHandle(NULL),
445 mCameraInitialized(false),
446 mCallbackOps(NULL),
447 mMetadataChannel(NULL),
448 mPictureChannel(NULL),
449 mRawChannel(NULL),
450 mSupportChannel(NULL),
451 mAnalysisChannel(NULL),
452 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700453 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700454 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800455 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800456 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700457 mChannelHandle(0),
458 mFirstConfiguration(true),
459 mFlush(false),
460 mFlushPerf(false),
461 mParamHeap(NULL),
462 mParameters(NULL),
463 mPrevParameters(NULL),
464 m_bIsVideo(false),
465 m_bIs4KVideo(false),
466 m_bEisSupportedSize(false),
467 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800468 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700469 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700470 mShutterDispatcher(this),
471 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700472 mMinProcessedFrameDuration(0),
473 mMinJpegFrameDuration(0),
474 mMinRawFrameDuration(0),
475 mMetaFrameCount(0U),
476 mUpdateDebugLevel(false),
477 mCallbacks(callbacks),
478 mCaptureIntent(0),
479 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700480 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800481 /* DevCamDebug metadata internal m control*/
482 mDevCamDebugMetaEnable(0),
483 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700484 mBatchSize(0),
485 mToBeQueuedVidBufs(0),
486 mHFRVideoFps(DEFAULT_VIDEO_FPS),
487 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800488 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800489 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700490 mFirstFrameNumberInBatch(0),
491 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800492 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700493 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
494 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000495 mPDSupported(false),
496 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700497 mInstantAEC(false),
498 mResetInstantAEC(false),
499 mInstantAECSettledFrameNumber(0),
500 mAecSkipDisplayFrameBound(0),
501 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800502 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700503 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700504 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700505 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700506 mState(CLOSED),
507 mIsDeviceLinked(false),
508 mIsMainCamera(true),
509 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700510 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800511 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800512 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700513 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800514 mIsApInputUsedForHdrPlus(false),
515 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800516 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700517{
518 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700519 mCommon.init(gCamCapability[cameraId]);
520 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700521#ifndef USE_HAL_3_3
522 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
523#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700524 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700525#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700526 mCameraDevice.common.close = close_camera_device;
527 mCameraDevice.ops = &mCameraOps;
528 mCameraDevice.priv = this;
529 gCamCapability[cameraId]->version = CAM_HAL_V3;
530 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
531 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
532 gCamCapability[cameraId]->min_num_pp_bufs = 3;
533
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800534 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700535
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800536 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700537 mPendingLiveRequest = 0;
538 mCurrentRequestId = -1;
539 pthread_mutex_init(&mMutex, NULL);
540
541 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
542 mDefaultMetadata[i] = NULL;
543
544 // Getting system props of different kinds
545 char prop[PROPERTY_VALUE_MAX];
546 memset(prop, 0, sizeof(prop));
547 property_get("persist.camera.raw.dump", prop, "0");
548 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800549 property_get("persist.camera.hal3.force.hdr", prop, "0");
550 mForceHdrSnapshot = atoi(prop);
551
Thierry Strudel3d639192016-09-09 11:52:26 -0700552 if (mEnableRawDump)
553 LOGD("Raw dump from Camera HAL enabled");
554
555 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
556 memset(mLdafCalib, 0, sizeof(mLdafCalib));
557
558 memset(prop, 0, sizeof(prop));
559 property_get("persist.camera.tnr.preview", prop, "0");
560 m_bTnrPreview = (uint8_t)atoi(prop);
561
562 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800563 property_get("persist.camera.swtnr.preview", prop, "1");
564 m_bSwTnrPreview = (uint8_t)atoi(prop);
565
566 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700567 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700568 m_bTnrVideo = (uint8_t)atoi(prop);
569
570 memset(prop, 0, sizeof(prop));
571 property_get("persist.camera.avtimer.debug", prop, "0");
572 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800573 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700574
Thierry Strudel54dc9782017-02-15 12:12:10 -0800575 memset(prop, 0, sizeof(prop));
576 property_get("persist.camera.cacmode.disable", prop, "0");
577 m_cacModeDisabled = (uint8_t)atoi(prop);
578
Thierry Strudel3d639192016-09-09 11:52:26 -0700579 //Load and read GPU library.
580 lib_surface_utils = NULL;
581 LINK_get_surface_pixel_alignment = NULL;
582 mSurfaceStridePadding = CAM_PAD_TO_32;
583 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
584 if (lib_surface_utils) {
585 *(void **)&LINK_get_surface_pixel_alignment =
586 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
587 if (LINK_get_surface_pixel_alignment) {
588 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
589 }
590 dlclose(lib_surface_utils);
591 }
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700592
Emilian Peev0f3c3162017-03-15 12:57:46 +0000593 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
594 mPDSupported = (0 <= mPDIndex) ? true : false;
595
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700596 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700597}
598
599/*===========================================================================
600 * FUNCTION : ~QCamera3HardwareInterface
601 *
602 * DESCRIPTION: destructor of QCamera3HardwareInterface
603 *
604 * PARAMETERS : none
605 *
606 * RETURN : none
607 *==========================================================================*/
608QCamera3HardwareInterface::~QCamera3HardwareInterface()
609{
610 LOGD("E");
611
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800612 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700613
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800614 // Disable power hint and enable the perf lock for close camera
615 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
616 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
617
618 // unlink of dualcam during close camera
619 if (mIsDeviceLinked) {
620 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
621 &m_pDualCamCmdPtr->bundle_info;
622 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
623 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
624 pthread_mutex_lock(&gCamLock);
625
626 if (mIsMainCamera == 1) {
627 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
628 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
629 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
630 // related session id should be session id of linked session
631 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
632 } else {
633 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
634 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
635 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
636 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
637 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800638 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800639 pthread_mutex_unlock(&gCamLock);
640
641 rc = mCameraHandle->ops->set_dual_cam_cmd(
642 mCameraHandle->camera_handle);
643 if (rc < 0) {
644 LOGE("Dualcam: Unlink failed, but still proceed to close");
645 }
646 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700647
648 /* We need to stop all streams before deleting any stream */
649 if (mRawDumpChannel) {
650 mRawDumpChannel->stop();
651 }
652
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700653 if (mHdrPlusRawSrcChannel) {
654 mHdrPlusRawSrcChannel->stop();
655 }
656
Thierry Strudel3d639192016-09-09 11:52:26 -0700657 // NOTE: 'camera3_stream_t *' objects are already freed at
658 // this stage by the framework
659 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
660 it != mStreamInfo.end(); it++) {
661 QCamera3ProcessingChannel *channel = (*it)->channel;
662 if (channel) {
663 channel->stop();
664 }
665 }
666 if (mSupportChannel)
667 mSupportChannel->stop();
668
669 if (mAnalysisChannel) {
670 mAnalysisChannel->stop();
671 }
672 if (mMetadataChannel) {
673 mMetadataChannel->stop();
674 }
675 if (mChannelHandle) {
676 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
677 mChannelHandle);
678 LOGD("stopping channel %d", mChannelHandle);
679 }
680
681 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
682 it != mStreamInfo.end(); it++) {
683 QCamera3ProcessingChannel *channel = (*it)->channel;
684 if (channel)
685 delete channel;
686 free (*it);
687 }
688 if (mSupportChannel) {
689 delete mSupportChannel;
690 mSupportChannel = NULL;
691 }
692
693 if (mAnalysisChannel) {
694 delete mAnalysisChannel;
695 mAnalysisChannel = NULL;
696 }
697 if (mRawDumpChannel) {
698 delete mRawDumpChannel;
699 mRawDumpChannel = NULL;
700 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700701 if (mHdrPlusRawSrcChannel) {
702 delete mHdrPlusRawSrcChannel;
703 mHdrPlusRawSrcChannel = NULL;
704 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700705 if (mDummyBatchChannel) {
706 delete mDummyBatchChannel;
707 mDummyBatchChannel = NULL;
708 }
709
710 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800711 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700712
713 if (mMetadataChannel) {
714 delete mMetadataChannel;
715 mMetadataChannel = NULL;
716 }
717
718 /* Clean up all channels */
719 if (mCameraInitialized) {
720 if(!mFirstConfiguration){
721 //send the last unconfigure
722 cam_stream_size_info_t stream_config_info;
723 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
724 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
725 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800726 m_bIs4KVideo ? 0 :
727 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700728 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700729 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
730 stream_config_info);
731 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
732 if (rc < 0) {
733 LOGE("set_parms failed for unconfigure");
734 }
735 }
736 deinitParameters();
737 }
738
739 if (mChannelHandle) {
740 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
741 mChannelHandle);
742 LOGH("deleting channel %d", mChannelHandle);
743 mChannelHandle = 0;
744 }
745
746 if (mState != CLOSED)
747 closeCamera();
748
749 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
750 req.mPendingBufferList.clear();
751 }
752 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700753 for (pendingRequestIterator i = mPendingRequestsList.begin();
754 i != mPendingRequestsList.end();) {
755 i = erasePendingRequest(i);
756 }
757 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
758 if (mDefaultMetadata[i])
759 free_camera_metadata(mDefaultMetadata[i]);
760
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800761 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700762
763 pthread_cond_destroy(&mRequestCond);
764
765 pthread_cond_destroy(&mBuffersCond);
766
767 pthread_mutex_destroy(&mMutex);
768 LOGD("X");
769}
770
771/*===========================================================================
772 * FUNCTION : erasePendingRequest
773 *
774 * DESCRIPTION: function to erase a desired pending request after freeing any
775 * allocated memory
776 *
777 * PARAMETERS :
778 * @i : iterator pointing to pending request to be erased
779 *
780 * RETURN : iterator pointing to the next request
781 *==========================================================================*/
782QCamera3HardwareInterface::pendingRequestIterator
783 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
784{
785 if (i->input_buffer != NULL) {
786 free(i->input_buffer);
787 i->input_buffer = NULL;
788 }
789 if (i->settings != NULL)
790 free_camera_metadata((camera_metadata_t*)i->settings);
791 return mPendingRequestsList.erase(i);
792}
793
794/*===========================================================================
795 * FUNCTION : camEvtHandle
796 *
797 * DESCRIPTION: Function registered to mm-camera-interface to handle events
798 *
799 * PARAMETERS :
800 * @camera_handle : interface layer camera handle
801 * @evt : ptr to event
802 * @user_data : user data ptr
803 *
804 * RETURN : none
805 *==========================================================================*/
806void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
807 mm_camera_event_t *evt,
808 void *user_data)
809{
810 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
811 if (obj && evt) {
812 switch(evt->server_event_type) {
813 case CAM_EVENT_TYPE_DAEMON_DIED:
814 pthread_mutex_lock(&obj->mMutex);
815 obj->mState = ERROR;
816 pthread_mutex_unlock(&obj->mMutex);
817 LOGE("Fatal, camera daemon died");
818 break;
819
820 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
821 LOGD("HAL got request pull from Daemon");
822 pthread_mutex_lock(&obj->mMutex);
823 obj->mWokenUpByDaemon = true;
824 obj->unblockRequestIfNecessary();
825 pthread_mutex_unlock(&obj->mMutex);
826 break;
827
828 default:
829 LOGW("Warning: Unhandled event %d",
830 evt->server_event_type);
831 break;
832 }
833 } else {
834 LOGE("NULL user_data/evt");
835 }
836}
837
838/*===========================================================================
839 * FUNCTION : openCamera
840 *
841 * DESCRIPTION: open camera
842 *
843 * PARAMETERS :
844 * @hw_device : double ptr for camera device struct
845 *
846 * RETURN : int32_t type of status
847 * NO_ERROR -- success
848 * none-zero failure code
849 *==========================================================================*/
850int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
851{
852 int rc = 0;
853 if (mState != CLOSED) {
854 *hw_device = NULL;
855 return PERMISSION_DENIED;
856 }
857
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700858 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800859 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700860 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
861 mCameraId);
862
863 rc = openCamera();
864 if (rc == 0) {
865 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800866 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700867 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800868 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700869
Thierry Strudel3d639192016-09-09 11:52:26 -0700870 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
871 mCameraId, rc);
872
873 if (rc == NO_ERROR) {
874 mState = OPENED;
875 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800876
Thierry Strudel3d639192016-09-09 11:52:26 -0700877 return rc;
878}
879
880/*===========================================================================
881 * FUNCTION : openCamera
882 *
883 * DESCRIPTION: open camera
884 *
885 * PARAMETERS : none
886 *
887 * RETURN : int32_t type of status
888 * NO_ERROR -- success
889 * none-zero failure code
890 *==========================================================================*/
891int QCamera3HardwareInterface::openCamera()
892{
893 int rc = 0;
894 char value[PROPERTY_VALUE_MAX];
895
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800896 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700897 if (mCameraHandle) {
898 LOGE("Failure: Camera already opened");
899 return ALREADY_EXISTS;
900 }
901
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700902 {
903 Mutex::Autolock l(gHdrPlusClientLock);
904 if (gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700905 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700906 rc = gEaselManagerClient.resume();
907 if (rc != 0) {
908 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
909 return rc;
910 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800911 }
912 }
913
Thierry Strudel3d639192016-09-09 11:52:26 -0700914 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
915 if (rc < 0) {
916 LOGE("Failed to reserve flash for camera id: %d",
917 mCameraId);
918 return UNKNOWN_ERROR;
919 }
920
921 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
922 if (rc) {
923 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
924 return rc;
925 }
926
927 if (!mCameraHandle) {
928 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
929 return -ENODEV;
930 }
931
932 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
933 camEvtHandle, (void *)this);
934
935 if (rc < 0) {
936 LOGE("Error, failed to register event callback");
937 /* Not closing camera here since it is already handled in destructor */
938 return FAILED_TRANSACTION;
939 }
940
941 mExifParams.debug_params =
942 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
943 if (mExifParams.debug_params) {
944 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
945 } else {
946 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
947 return NO_MEMORY;
948 }
949 mFirstConfiguration = true;
950
951 //Notify display HAL that a camera session is active.
952 //But avoid calling the same during bootup because camera service might open/close
953 //cameras at boot time during its initialization and display service will also internally
954 //wait for camera service to initialize first while calling this display API, resulting in a
955 //deadlock situation. Since boot time camera open/close calls are made only to fetch
956 //capabilities, no need of this display bw optimization.
957 //Use "service.bootanim.exit" property to know boot status.
958 property_get("service.bootanim.exit", value, "0");
959 if (atoi(value) == 1) {
960 pthread_mutex_lock(&gCamLock);
961 if (gNumCameraSessions++ == 0) {
962 setCameraLaunchStatus(true);
963 }
964 pthread_mutex_unlock(&gCamLock);
965 }
966
967 //fill the session id needed while linking dual cam
968 pthread_mutex_lock(&gCamLock);
969 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
970 &sessionId[mCameraId]);
971 pthread_mutex_unlock(&gCamLock);
972
973 if (rc < 0) {
974 LOGE("Error, failed to get sessiion id");
975 return UNKNOWN_ERROR;
976 } else {
977 //Allocate related cam sync buffer
978 //this is needed for the payload that goes along with bundling cmd for related
979 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700980 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
981 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700982 if(rc != OK) {
983 rc = NO_MEMORY;
984 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
985 return NO_MEMORY;
986 }
987
988 //Map memory for related cam sync buffer
989 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700990 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
991 m_pDualCamCmdHeap->getFd(0),
992 sizeof(cam_dual_camera_cmd_info_t),
993 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700994 if(rc < 0) {
995 LOGE("Dualcam: failed to map Related cam sync buffer");
996 rc = FAILED_TRANSACTION;
997 return NO_MEMORY;
998 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700999 m_pDualCamCmdPtr =
1000 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001001 }
1002
1003 LOGH("mCameraId=%d",mCameraId);
1004
1005 return NO_ERROR;
1006}
1007
1008/*===========================================================================
1009 * FUNCTION : closeCamera
1010 *
1011 * DESCRIPTION: close camera
1012 *
1013 * PARAMETERS : none
1014 *
1015 * RETURN : int32_t type of status
1016 * NO_ERROR -- success
1017 * none-zero failure code
1018 *==========================================================================*/
1019int QCamera3HardwareInterface::closeCamera()
1020{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001021 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001022 int rc = NO_ERROR;
1023 char value[PROPERTY_VALUE_MAX];
1024
1025 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1026 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001027
1028 // unmap memory for related cam sync buffer
1029 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001030 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001031 if (NULL != m_pDualCamCmdHeap) {
1032 m_pDualCamCmdHeap->deallocate();
1033 delete m_pDualCamCmdHeap;
1034 m_pDualCamCmdHeap = NULL;
1035 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001036 }
1037
Thierry Strudel3d639192016-09-09 11:52:26 -07001038 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1039 mCameraHandle = NULL;
1040
1041 //reset session id to some invalid id
1042 pthread_mutex_lock(&gCamLock);
1043 sessionId[mCameraId] = 0xDEADBEEF;
1044 pthread_mutex_unlock(&gCamLock);
1045
1046 //Notify display HAL that there is no active camera session
1047 //but avoid calling the same during bootup. Refer to openCamera
1048 //for more details.
1049 property_get("service.bootanim.exit", value, "0");
1050 if (atoi(value) == 1) {
1051 pthread_mutex_lock(&gCamLock);
1052 if (--gNumCameraSessions == 0) {
1053 setCameraLaunchStatus(false);
1054 }
1055 pthread_mutex_unlock(&gCamLock);
1056 }
1057
Thierry Strudel3d639192016-09-09 11:52:26 -07001058 if (mExifParams.debug_params) {
1059 free(mExifParams.debug_params);
1060 mExifParams.debug_params = NULL;
1061 }
1062 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1063 LOGW("Failed to release flash for camera id: %d",
1064 mCameraId);
1065 }
1066 mState = CLOSED;
1067 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1068 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001069
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001070 {
1071 Mutex::Autolock l(gHdrPlusClientLock);
1072 if (gHdrPlusClient != nullptr) {
1073 // Disable HDR+ mode.
1074 disableHdrPlusModeLocked();
1075 // Disconnect Easel if it's connected.
1076 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1077 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001078 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001079
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001080 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001081 rc = gEaselManagerClient.stopMipi(mCameraId);
1082 if (rc != 0) {
1083 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1084 }
1085
1086 rc = gEaselManagerClient.suspend();
1087 if (rc != 0) {
1088 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1089 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001090 }
1091 }
1092
Thierry Strudel3d639192016-09-09 11:52:26 -07001093 return rc;
1094}
1095
1096/*===========================================================================
1097 * FUNCTION : initialize
1098 *
1099 * DESCRIPTION: Initialize frameworks callback functions
1100 *
1101 * PARAMETERS :
1102 * @callback_ops : callback function to frameworks
1103 *
1104 * RETURN :
1105 *
1106 *==========================================================================*/
1107int QCamera3HardwareInterface::initialize(
1108 const struct camera3_callback_ops *callback_ops)
1109{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001110 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001111 int rc;
1112
1113 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1114 pthread_mutex_lock(&mMutex);
1115
1116 // Validate current state
1117 switch (mState) {
1118 case OPENED:
1119 /* valid state */
1120 break;
1121 default:
1122 LOGE("Invalid state %d", mState);
1123 rc = -ENODEV;
1124 goto err1;
1125 }
1126
1127 rc = initParameters();
1128 if (rc < 0) {
1129 LOGE("initParamters failed %d", rc);
1130 goto err1;
1131 }
1132 mCallbackOps = callback_ops;
1133
1134 mChannelHandle = mCameraHandle->ops->add_channel(
1135 mCameraHandle->camera_handle, NULL, NULL, this);
1136 if (mChannelHandle == 0) {
1137 LOGE("add_channel failed");
1138 rc = -ENOMEM;
1139 pthread_mutex_unlock(&mMutex);
1140 return rc;
1141 }
1142
1143 pthread_mutex_unlock(&mMutex);
1144 mCameraInitialized = true;
1145 mState = INITIALIZED;
1146 LOGI("X");
1147 return 0;
1148
1149err1:
1150 pthread_mutex_unlock(&mMutex);
1151 return rc;
1152}
1153
1154/*===========================================================================
1155 * FUNCTION : validateStreamDimensions
1156 *
1157 * DESCRIPTION: Check if the configuration requested are those advertised
1158 *
1159 * PARAMETERS :
1160 * @stream_list : streams to be configured
1161 *
1162 * RETURN :
1163 *
1164 *==========================================================================*/
1165int QCamera3HardwareInterface::validateStreamDimensions(
1166 camera3_stream_configuration_t *streamList)
1167{
1168 int rc = NO_ERROR;
1169 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001170 uint32_t depthWidth = 0;
1171 uint32_t depthHeight = 0;
1172 if (mPDSupported) {
1173 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1174 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1175 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001176
1177 camera3_stream_t *inputStream = NULL;
1178 /*
1179 * Loop through all streams to find input stream if it exists*
1180 */
1181 for (size_t i = 0; i< streamList->num_streams; i++) {
1182 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1183 if (inputStream != NULL) {
1184 LOGE("Error, Multiple input streams requested");
1185 return -EINVAL;
1186 }
1187 inputStream = streamList->streams[i];
1188 }
1189 }
1190 /*
1191 * Loop through all streams requested in configuration
1192 * Check if unsupported sizes have been requested on any of them
1193 */
1194 for (size_t j = 0; j < streamList->num_streams; j++) {
1195 bool sizeFound = false;
1196 camera3_stream_t *newStream = streamList->streams[j];
1197
1198 uint32_t rotatedHeight = newStream->height;
1199 uint32_t rotatedWidth = newStream->width;
1200 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1201 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1202 rotatedHeight = newStream->width;
1203 rotatedWidth = newStream->height;
1204 }
1205
1206 /*
1207 * Sizes are different for each type of stream format check against
1208 * appropriate table.
1209 */
1210 switch (newStream->format) {
1211 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1212 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1213 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001214 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1215 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1216 mPDSupported) {
1217 if ((depthWidth == newStream->width) &&
1218 (depthHeight == newStream->height)) {
1219 sizeFound = true;
1220 }
1221 break;
1222 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001223 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1224 for (size_t i = 0; i < count; i++) {
1225 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1226 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1227 sizeFound = true;
1228 break;
1229 }
1230 }
1231 break;
1232 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001233 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1234 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001235 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001236 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001237 if ((depthSamplesCount == newStream->width) &&
1238 (1 == newStream->height)) {
1239 sizeFound = true;
1240 }
1241 break;
1242 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001243 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1244 /* Verify set size against generated sizes table */
1245 for (size_t i = 0; i < count; i++) {
1246 if (((int32_t)rotatedWidth ==
1247 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1248 ((int32_t)rotatedHeight ==
1249 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1250 sizeFound = true;
1251 break;
1252 }
1253 }
1254 break;
1255 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1256 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1257 default:
1258 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1259 || newStream->stream_type == CAMERA3_STREAM_INPUT
1260 || IS_USAGE_ZSL(newStream->usage)) {
1261 if (((int32_t)rotatedWidth ==
1262 gCamCapability[mCameraId]->active_array_size.width) &&
1263 ((int32_t)rotatedHeight ==
1264 gCamCapability[mCameraId]->active_array_size.height)) {
1265 sizeFound = true;
1266 break;
1267 }
1268 /* We could potentially break here to enforce ZSL stream
1269 * set from frameworks always is full active array size
1270 * but it is not clear from the spc if framework will always
1271 * follow that, also we have logic to override to full array
1272 * size, so keeping the logic lenient at the moment
1273 */
1274 }
1275 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1276 MAX_SIZES_CNT);
1277 for (size_t i = 0; i < count; i++) {
1278 if (((int32_t)rotatedWidth ==
1279 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1280 ((int32_t)rotatedHeight ==
1281 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1282 sizeFound = true;
1283 break;
1284 }
1285 }
1286 break;
1287 } /* End of switch(newStream->format) */
1288
1289 /* We error out even if a single stream has unsupported size set */
1290 if (!sizeFound) {
1291 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1292 rotatedWidth, rotatedHeight, newStream->format,
1293 gCamCapability[mCameraId]->active_array_size.width,
1294 gCamCapability[mCameraId]->active_array_size.height);
1295 rc = -EINVAL;
1296 break;
1297 }
1298 } /* End of for each stream */
1299 return rc;
1300}
1301
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001302/*===========================================================================
1303 * FUNCTION : validateUsageFlags
1304 *
1305 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1306 *
1307 * PARAMETERS :
1308 * @stream_list : streams to be configured
1309 *
1310 * RETURN :
1311 * NO_ERROR if the usage flags are supported
1312 * error code if usage flags are not supported
1313 *
1314 *==========================================================================*/
1315int QCamera3HardwareInterface::validateUsageFlags(
1316 const camera3_stream_configuration_t* streamList)
1317{
1318 for (size_t j = 0; j < streamList->num_streams; j++) {
1319 const camera3_stream_t *newStream = streamList->streams[j];
1320
1321 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1322 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1323 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1324 continue;
1325 }
1326
1327 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1328 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1329 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1330 bool forcePreviewUBWC = true;
1331 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1332 forcePreviewUBWC = false;
1333 }
1334 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1335 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1336 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1337 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1338 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1339 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1340
1341 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1342 // So color spaces will always match.
1343
1344 // Check whether underlying formats of shared streams match.
1345 if (isVideo && isPreview && videoFormat != previewFormat) {
1346 LOGE("Combined video and preview usage flag is not supported");
1347 return -EINVAL;
1348 }
1349 if (isPreview && isZSL && previewFormat != zslFormat) {
1350 LOGE("Combined preview and zsl usage flag is not supported");
1351 return -EINVAL;
1352 }
1353 if (isVideo && isZSL && videoFormat != zslFormat) {
1354 LOGE("Combined video and zsl usage flag is not supported");
1355 return -EINVAL;
1356 }
1357 }
1358 return NO_ERROR;
1359}
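// Illustrative example (not from the original source): a single
// IMPLEMENTATION_DEFINED output stream whose usage bits mark it as both a
// video-encoder and a ZSL consumer would be rejected by the checks above
// whenever the default video and snapshot formats differ for that resolution.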
1360
1361/*===========================================================================
1362 * FUNCTION : validateUsageFlagsForEis
1363 *
1364 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1365 *
1366 * PARAMETERS :
1367 * @stream_list : streams to be configured
1368 *
1369 * RETURN :
1370 * NO_ERROR if the usage flags are supported
1371 * error code if usage flags are not supported
1372 *
1373 *==========================================================================*/
1374int QCamera3HardwareInterface::validateUsageFlagsForEis(
1375 const camera3_stream_configuration_t* streamList)
1376{
1377 for (size_t j = 0; j < streamList->num_streams; j++) {
1378 const camera3_stream_t *newStream = streamList->streams[j];
1379
1380 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1381 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1382
1383        // Because EIS is "hard-coded" for certain use cases, and the current
1384        // implementation doesn't support shared preview and video on the same
1385        // stream, return failure if EIS is forced on.
1386 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1387 LOGE("Combined video and preview usage flag is not supported due to EIS");
1388 return -EINVAL;
1389 }
1390 }
1391 return NO_ERROR;
1392}
1393
Thierry Strudel3d639192016-09-09 11:52:26 -07001394/*==============================================================================
1395 * FUNCTION : isSupportChannelNeeded
1396 *
1397 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1398 *
1399 * PARAMETERS :
1400 * @stream_list : streams to be configured
1401 * @stream_config_info : the config info for streams to be configured
1402 *
1403 * RETURN : Boolean true/false decision
1404 *
1405 *==========================================================================*/
1406bool QCamera3HardwareInterface::isSupportChannelNeeded(
1407 camera3_stream_configuration_t *streamList,
1408 cam_stream_size_info_t stream_config_info)
1409{
1410 uint32_t i;
1411 bool pprocRequested = false;
1412    /* Check for conditions where the PProc pipeline does not have any streams */
1413 for (i = 0; i < stream_config_info.num_streams; i++) {
1414 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1415 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1416 pprocRequested = true;
1417 break;
1418 }
1419 }
1420
1421    if (pprocRequested == false)
1422 return true;
1423
1424 /* Dummy stream needed if only raw or jpeg streams present */
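    // For example (illustrative): a configuration containing only RAW and/or
    // BLOB (JPEG) streams falls through the switch below without returning
    // false, so the dummy support channel is reported as needed.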
1425 for (i = 0; i < streamList->num_streams; i++) {
1426 switch(streamList->streams[i]->format) {
1427 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1428 case HAL_PIXEL_FORMAT_RAW10:
1429 case HAL_PIXEL_FORMAT_RAW16:
1430 case HAL_PIXEL_FORMAT_BLOB:
1431 break;
1432 default:
1433 return false;
1434 }
1435 }
1436 return true;
1437}
1438
1439/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001440 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001441 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001442 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001443 *
1444 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001445 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001446 *
1447 * RETURN : int32_t type of status
1448 * NO_ERROR -- success
1449 *              non-zero failure code
1450 *
1451 *==========================================================================*/
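// Illustrative caller-side sketch (an assumption, not part of the original
// source): the query is issued after mStreamConfigInfo is populated, e.g.
//   cam_sensor_mode_info_t modeInfo;
//   memset(&modeInfo, 0, sizeof(modeInfo));
//   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
//       // modeInfo.op_pixel_clk, num_raw_bits, and the active/pixel array
//       // sizes describe the sensor mode chosen for this configuration.
//   }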
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001452int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001453{
1454 int32_t rc = NO_ERROR;
1455
1456 cam_dimension_t max_dim = {0, 0};
1457 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1458 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1459 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1460 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1461 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1462 }
1463
1464 clear_metadata_buffer(mParameters);
1465
1466 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1467 max_dim);
1468 if (rc != NO_ERROR) {
1469 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1470 return rc;
1471 }
1472
1473 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1474 if (rc != NO_ERROR) {
1475 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1476 return rc;
1477 }
1478
1479 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001480 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001481
1482 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1483 mParameters);
1484 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 return rc;
1487 }
1488
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001490 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1491 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1492 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1493 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1494 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001495
1496 return rc;
1497}
1498
1499/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001500 * FUNCTION : addToPPFeatureMask
1501 *
1502 * DESCRIPTION: add additional features to pp feature mask based on
1503 * stream type and usecase
1504 *
1505 * PARAMETERS :
1506 * @stream_format : stream type for feature mask
1507 * @stream_idx : stream idx within postprocess_mask list to change
1508 *
1509 * RETURN     : None
1510 *
1511 *==========================================================================*/
1512void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1513 uint32_t stream_idx)
1514{
1515 char feature_mask_value[PROPERTY_VALUE_MAX];
1516 cam_feature_mask_t feature_mask;
1517 int args_converted;
1518 int property_len;
1519
1520 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001521#ifdef _LE_CAMERA_
1522 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1523 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1524 property_len = property_get("persist.camera.hal3.feature",
1525 feature_mask_value, swtnr_feature_mask_value);
1526#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 property_len = property_get("persist.camera.hal3.feature",
1528 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001529#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001530 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1531 (feature_mask_value[1] == 'x')) {
1532 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1533 } else {
1534 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1535 }
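    // Illustrative examples (placeholder values, not from the original source):
    // the property accepts either of the forms parsed above, e.g.
    //   adb shell setprop persist.camera.hal3.feature 0x1000   (hex, "0x" prefix)
    //   adb shell setprop persist.camera.hal3.feature 4096     (decimal)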
1536 if (1 != args_converted) {
1537 feature_mask = 0;
1538 LOGE("Wrong feature mask %s", feature_mask_value);
1539 return;
1540 }
1541
1542 switch (stream_format) {
1543 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1544 /* Add LLVD to pp feature mask only if video hint is enabled */
1545 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QTI_FEATURE_SW_TNR;
1548 LOGH("Added SW TNR to pp feature mask");
1549 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1550 mStreamConfigInfo.postprocess_mask[stream_idx]
1551 |= CAM_QCOM_FEATURE_LLVD;
1552 LOGH("Added LLVD SeeMore to pp feature mask");
1553 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001554 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1557 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001558 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1559 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1560 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1561 CAM_QTI_FEATURE_BINNING_CORRECTION;
1562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001563 break;
1564 }
1565 default:
1566 break;
1567 }
1568 LOGD("PP feature mask %llx",
1569 mStreamConfigInfo.postprocess_mask[stream_idx]);
1570}
1571
1572/*==============================================================================
1573 * FUNCTION : updateFpsInPreviewBuffer
1574 *
1575 * DESCRIPTION: update FPS information in preview buffer.
1576 *
1577 * PARAMETERS :
1578 * @metadata : pointer to metadata buffer
1579 * @frame_number: frame_number to look for in pending buffer list
1580 *
1581 * RETURN : None
1582 *
1583 *==========================================================================*/
1584void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1585 uint32_t frame_number)
1586{
1587 // Mark all pending buffers for this particular request
1588 // with corresponding framerate information
1589 for (List<PendingBuffersInRequest>::iterator req =
1590 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1591 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1592 for(List<PendingBufferInfo>::iterator j =
1593 req->mPendingBufferList.begin();
1594 j != req->mPendingBufferList.end(); j++) {
1595 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1596 if ((req->frame_number == frame_number) &&
1597 (channel->getStreamTypeMask() &
1598 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1599 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1600 CAM_INTF_PARM_FPS_RANGE, metadata) {
1601 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1602 struct private_handle_t *priv_handle =
1603 (struct private_handle_t *)(*(j->buffer));
1604 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1605 }
1606 }
1607 }
1608 }
1609}
1610
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001611/*==============================================================================
1612 * FUNCTION : updateTimeStampInPendingBuffers
1613 *
1614 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1615 * of a frame number
1616 *
1617 * PARAMETERS :
1618 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1619 * @timestamp : timestamp to be set
1620 *
1621 * RETURN : None
1622 *
1623 *==========================================================================*/
1624void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1625 uint32_t frameNumber, nsecs_t timestamp)
1626{
1627 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1628 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1629 if (req->frame_number != frameNumber)
1630 continue;
1631
1632 for (auto k = req->mPendingBufferList.begin();
1633 k != req->mPendingBufferList.end(); k++ ) {
1634 struct private_handle_t *priv_handle =
1635 (struct private_handle_t *) (*(k->buffer));
1636 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1637 }
1638 }
1639 return;
1640}
1641
Thierry Strudel3d639192016-09-09 11:52:26 -07001642/*===========================================================================
1643 * FUNCTION : configureStreams
1644 *
1645 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1646 * and output streams.
1647 *
1648 * PARAMETERS :
1649 * @stream_list : streams to be configured
1650 *
1651 * RETURN     : int type of status: NO_ERROR on success, error code otherwise
1652 *
1653 *==========================================================================*/
1654int QCamera3HardwareInterface::configureStreams(
1655 camera3_stream_configuration_t *streamList)
1656{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001657 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 int rc = 0;
1659
1660 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001661 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001662 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001663 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001664
1665 return rc;
1666}
1667
1668/*===========================================================================
1669 * FUNCTION : configureStreamsPerfLocked
1670 *
1671 * DESCRIPTION: configureStreams while perfLock is held.
1672 *
1673 * PARAMETERS :
1674 * @stream_list : streams to be configured
1675 *
1676 * RETURN : int32_t type of status
1677 * NO_ERROR -- success
1678 *              non-zero failure code
1679 *==========================================================================*/
1680int QCamera3HardwareInterface::configureStreamsPerfLocked(
1681 camera3_stream_configuration_t *streamList)
1682{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001683 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001684 int rc = 0;
1685
1686 // Sanity check stream_list
1687 if (streamList == NULL) {
1688 LOGE("NULL stream configuration");
1689 return BAD_VALUE;
1690 }
1691 if (streamList->streams == NULL) {
1692 LOGE("NULL stream list");
1693 return BAD_VALUE;
1694 }
1695
1696 if (streamList->num_streams < 1) {
1697 LOGE("Bad number of streams requested: %d",
1698 streamList->num_streams);
1699 return BAD_VALUE;
1700 }
1701
1702 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1703 LOGE("Maximum number of streams %d exceeded: %d",
1704 MAX_NUM_STREAMS, streamList->num_streams);
1705 return BAD_VALUE;
1706 }
1707
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001708 rc = validateUsageFlags(streamList);
1709 if (rc != NO_ERROR) {
1710 return rc;
1711 }
1712
Thierry Strudel3d639192016-09-09 11:52:26 -07001713 mOpMode = streamList->operation_mode;
1714 LOGD("mOpMode: %d", mOpMode);
1715
1716    /* first invalidate all the streams in mStreamInfo;
1717     * if they appear again, they will be validated */
1718 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1719 it != mStreamInfo.end(); it++) {
1720 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1721 if (channel) {
1722 channel->stop();
1723 }
1724 (*it)->status = INVALID;
1725 }
1726
1727 if (mRawDumpChannel) {
1728 mRawDumpChannel->stop();
1729 delete mRawDumpChannel;
1730 mRawDumpChannel = NULL;
1731 }
1732
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001733 if (mHdrPlusRawSrcChannel) {
1734 mHdrPlusRawSrcChannel->stop();
1735 delete mHdrPlusRawSrcChannel;
1736 mHdrPlusRawSrcChannel = NULL;
1737 }
1738
Thierry Strudel3d639192016-09-09 11:52:26 -07001739 if (mSupportChannel)
1740 mSupportChannel->stop();
1741
1742 if (mAnalysisChannel) {
1743 mAnalysisChannel->stop();
1744 }
1745 if (mMetadataChannel) {
1746        /* If mStreamInfo is not empty, there is a metadata stream */
1747 mMetadataChannel->stop();
1748 }
1749 if (mChannelHandle) {
1750 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1751 mChannelHandle);
1752 LOGD("stopping channel %d", mChannelHandle);
1753 }
1754
1755 pthread_mutex_lock(&mMutex);
1756
1757 // Check state
1758 switch (mState) {
1759 case INITIALIZED:
1760 case CONFIGURED:
1761 case STARTED:
1762 /* valid state */
1763 break;
1764 default:
1765 LOGE("Invalid state %d", mState);
1766 pthread_mutex_unlock(&mMutex);
1767 return -ENODEV;
1768 }
1769
1770 /* Check whether we have video stream */
1771 m_bIs4KVideo = false;
1772 m_bIsVideo = false;
1773 m_bEisSupportedSize = false;
1774 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001775 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001776 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001777 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001779 uint32_t videoWidth = 0U;
1780 uint32_t videoHeight = 0U;
1781 size_t rawStreamCnt = 0;
1782 size_t stallStreamCnt = 0;
1783 size_t processedStreamCnt = 0;
1784 // Number of streams on ISP encoder path
1785 size_t numStreamsOnEncoder = 0;
1786 size_t numYuv888OnEncoder = 0;
1787 bool bYuv888OverrideJpeg = false;
1788 cam_dimension_t largeYuv888Size = {0, 0};
1789 cam_dimension_t maxViewfinderSize = {0, 0};
1790 bool bJpegExceeds4K = false;
1791 bool bJpegOnEncoder = false;
1792 bool bUseCommonFeatureMask = false;
1793 cam_feature_mask_t commonFeatureMask = 0;
1794 bool bSmallJpegSize = false;
1795 uint32_t width_ratio;
1796 uint32_t height_ratio;
1797 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1798 camera3_stream_t *inputStream = NULL;
1799 bool isJpeg = false;
1800 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001801 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001802 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001803
1804 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1805
1806 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 uint8_t eis_prop_set;
1808 uint32_t maxEisWidth = 0;
1809 uint32_t maxEisHeight = 0;
1810
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001811 // Initialize all instant AEC related variables
1812 mInstantAEC = false;
1813 mResetInstantAEC = false;
1814 mInstantAECSettledFrameNumber = 0;
1815 mAecSkipDisplayFrameBound = 0;
1816 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001817 mCurrFeatureState = 0;
1818 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001819
Thierry Strudel3d639192016-09-09 11:52:26 -07001820 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1821
1822 size_t count = IS_TYPE_MAX;
1823 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1824 for (size_t i = 0; i < count; i++) {
1825 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001826 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1827 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 break;
1829 }
1830 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001831
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001832 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001833 maxEisWidth = MAX_EIS_WIDTH;
1834 maxEisHeight = MAX_EIS_HEIGHT;
1835 }
1836
1837 /* EIS setprop control */
1838 char eis_prop[PROPERTY_VALUE_MAX];
1839 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001840 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 eis_prop_set = (uint8_t)atoi(eis_prop);
1842
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001843 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001844 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1845
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001846 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1847 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001848
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 /* stream configurations */
1850 for (size_t i = 0; i < streamList->num_streams; i++) {
1851 camera3_stream_t *newStream = streamList->streams[i];
1852 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1853 "height = %d, rotation = %d, usage = 0x%x",
1854 i, newStream->stream_type, newStream->format,
1855 newStream->width, newStream->height, newStream->rotation,
1856 newStream->usage);
1857 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1858 newStream->stream_type == CAMERA3_STREAM_INPUT){
1859 isZsl = true;
1860 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001861 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1862 IS_USAGE_PREVIEW(newStream->usage)) {
1863 isPreview = true;
1864 }
1865
Thierry Strudel3d639192016-09-09 11:52:26 -07001866 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1867 inputStream = newStream;
1868 }
1869
Emilian Peev7650c122017-01-19 08:24:33 -08001870 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1871 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001872 isJpeg = true;
1873 jpegSize.width = newStream->width;
1874 jpegSize.height = newStream->height;
1875 if (newStream->width > VIDEO_4K_WIDTH ||
1876 newStream->height > VIDEO_4K_HEIGHT)
1877 bJpegExceeds4K = true;
1878 }
1879
1880 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1881 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1882 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001883            // In HAL3 we can have multiple different video streams.
1884            // The variables videoWidth and videoHeight below hold the
1885            // dimensions of the largest one.
1886 if (videoWidth < newStream->width ||
1887 videoHeight < newStream->height) {
1888 videoWidth = newStream->width;
1889 videoHeight = newStream->height;
1890 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1892 (VIDEO_4K_HEIGHT <= newStream->height)) {
1893 m_bIs4KVideo = true;
1894 }
1895 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1896 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001897
Thierry Strudel3d639192016-09-09 11:52:26 -07001898 }
1899 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1900 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1901 switch (newStream->format) {
1902 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001903 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1904 depthPresent = true;
1905 break;
1906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001907 stallStreamCnt++;
1908 if (isOnEncoder(maxViewfinderSize, newStream->width,
1909 newStream->height)) {
1910 numStreamsOnEncoder++;
1911 bJpegOnEncoder = true;
1912 }
1913 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1914 newStream->width);
1915 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1916                        newStream->height);
1917 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1918 "FATAL: max_downscale_factor cannot be zero and so assert");
1919 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1920 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1921 LOGH("Setting small jpeg size flag to true");
1922 bSmallJpegSize = true;
1923 }
1924 break;
1925 case HAL_PIXEL_FORMAT_RAW10:
1926 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1927 case HAL_PIXEL_FORMAT_RAW16:
1928 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001929 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1930 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1931 pdStatCount++;
1932 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1935 processedStreamCnt++;
1936 if (isOnEncoder(maxViewfinderSize, newStream->width,
1937 newStream->height)) {
1938 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1939 !IS_USAGE_ZSL(newStream->usage)) {
1940 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1941 }
1942 numStreamsOnEncoder++;
1943 }
1944 break;
1945 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1946 processedStreamCnt++;
1947 if (isOnEncoder(maxViewfinderSize, newStream->width,
1948 newStream->height)) {
1949 // If Yuv888 size is not greater than 4K, set feature mask
1950 // to SUPERSET so that it support concurrent request on
1951 // YUV and JPEG.
1952 if (newStream->width <= VIDEO_4K_WIDTH &&
1953 newStream->height <= VIDEO_4K_HEIGHT) {
1954 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1955 }
1956 numStreamsOnEncoder++;
1957 numYuv888OnEncoder++;
1958 largeYuv888Size.width = newStream->width;
1959 largeYuv888Size.height = newStream->height;
1960 }
1961 break;
1962 default:
1963 processedStreamCnt++;
1964 if (isOnEncoder(maxViewfinderSize, newStream->width,
1965 newStream->height)) {
1966 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1967 numStreamsOnEncoder++;
1968 }
1969 break;
1970 }
1971
1972 }
1973 }
1974
1975 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1976 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1977 !m_bIsVideo) {
1978 m_bEisEnable = false;
1979 }
1980
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001981 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1982 pthread_mutex_unlock(&mMutex);
1983 return -EINVAL;
1984 }
1985
Thierry Strudel54dc9782017-02-15 12:12:10 -08001986 uint8_t forceEnableTnr = 0;
1987 char tnr_prop[PROPERTY_VALUE_MAX];
1988 memset(tnr_prop, 0, sizeof(tnr_prop));
1989 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1990 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1991
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 /* Logic to enable/disable TNR based on specific config size/etc.*/
1993 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001994 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1995 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001996 else if (forceEnableTnr)
1997 m_bTnrEnabled = true;
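    // Illustrative override (sketch): setting the debug property, e.g.
    // "adb shell setprop debug.camera.tnr.forceenable 1", forces TNR on even
    // when the preview/video TNR conditions above are not met.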
Thierry Strudel3d639192016-09-09 11:52:26 -07001998
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001999 char videoHdrProp[PROPERTY_VALUE_MAX];
2000 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2001 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2002 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2003
2004 if (hdr_mode_prop == 1 && m_bIsVideo &&
2005 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2006 m_bVideoHdrEnabled = true;
2007 else
2008 m_bVideoHdrEnabled = false;
2009
2010
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 /* Check if num_streams is sane */
2012 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2013 rawStreamCnt > MAX_RAW_STREAMS ||
2014 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2015 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2016 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2017 pthread_mutex_unlock(&mMutex);
2018 return -EINVAL;
2019 }
2020 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002021 if (isZsl && m_bIs4KVideo) {
2022 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002023 pthread_mutex_unlock(&mMutex);
2024 return -EINVAL;
2025 }
2026 /* Check if stream sizes are sane */
2027 if (numStreamsOnEncoder > 2) {
2028 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2029 pthread_mutex_unlock(&mMutex);
2030 return -EINVAL;
2031 } else if (1 < numStreamsOnEncoder){
2032 bUseCommonFeatureMask = true;
2033 LOGH("Multiple streams above max viewfinder size, common mask needed");
2034 }
2035
2036 /* Check if BLOB size is greater than 4k in 4k recording case */
2037 if (m_bIs4KVideo && bJpegExceeds4K) {
2038 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2039 pthread_mutex_unlock(&mMutex);
2040 return -EINVAL;
2041 }
2042
Emilian Peev7650c122017-01-19 08:24:33 -08002043 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2044 depthPresent) {
2045 LOGE("HAL doesn't support depth streams in HFR mode!");
2046 pthread_mutex_unlock(&mMutex);
2047 return -EINVAL;
2048 }
2049
Thierry Strudel3d639192016-09-09 11:52:26 -07002050 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2051 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2052 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2053 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2054 // configurations:
2055 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2056 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2057 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2058 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2059 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2060 __func__);
2061 pthread_mutex_unlock(&mMutex);
2062 return -EINVAL;
2063 }
2064
2065 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2066 // the YUV stream's size is greater or equal to the JPEG size, set common
2067 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2068 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2069 jpegSize.width, jpegSize.height) &&
2070 largeYuv888Size.width > jpegSize.width &&
2071 largeYuv888Size.height > jpegSize.height) {
2072 bYuv888OverrideJpeg = true;
2073 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2074 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2075 }
2076
2077 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2078 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2079 commonFeatureMask);
2080 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2081 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2082
2083 rc = validateStreamDimensions(streamList);
2084 if (rc == NO_ERROR) {
2085 rc = validateStreamRotations(streamList);
2086 }
2087 if (rc != NO_ERROR) {
2088 LOGE("Invalid stream configuration requested!");
2089 pthread_mutex_unlock(&mMutex);
2090 return rc;
2091 }
2092
Emilian Peev0f3c3162017-03-15 12:57:46 +00002093 if (1 < pdStatCount) {
2094 LOGE("HAL doesn't support multiple PD streams");
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 }
2098
2099 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2100 (1 == pdStatCount)) {
2101 LOGE("HAL doesn't support PD streams in HFR mode!");
2102 pthread_mutex_unlock(&mMutex);
2103 return -EINVAL;
2104 }
2105
Thierry Strudel3d639192016-09-09 11:52:26 -07002106 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2107 for (size_t i = 0; i < streamList->num_streams; i++) {
2108 camera3_stream_t *newStream = streamList->streams[i];
2109 LOGH("newStream type = %d, stream format = %d "
2110 "stream size : %d x %d, stream rotation = %d",
2111 newStream->stream_type, newStream->format,
2112 newStream->width, newStream->height, newStream->rotation);
2113 //if the stream is in the mStreamList validate it
2114 bool stream_exists = false;
2115 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2116 it != mStreamInfo.end(); it++) {
2117 if ((*it)->stream == newStream) {
2118 QCamera3ProcessingChannel *channel =
2119 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2120 stream_exists = true;
2121 if (channel)
2122 delete channel;
2123 (*it)->status = VALID;
2124 (*it)->stream->priv = NULL;
2125 (*it)->channel = NULL;
2126 }
2127 }
2128 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2129 //new stream
2130 stream_info_t* stream_info;
2131 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2132 if (!stream_info) {
2133 LOGE("Could not allocate stream info");
2134 rc = -ENOMEM;
2135 pthread_mutex_unlock(&mMutex);
2136 return rc;
2137 }
2138 stream_info->stream = newStream;
2139 stream_info->status = VALID;
2140 stream_info->channel = NULL;
2141 mStreamInfo.push_back(stream_info);
2142 }
2143 /* Covers Opaque ZSL and API1 F/W ZSL */
2144 if (IS_USAGE_ZSL(newStream->usage)
2145 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2146 if (zslStream != NULL) {
2147 LOGE("Multiple input/reprocess streams requested!");
2148 pthread_mutex_unlock(&mMutex);
2149 return BAD_VALUE;
2150 }
2151 zslStream = newStream;
2152 }
2153 /* Covers YUV reprocess */
2154 if (inputStream != NULL) {
2155 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2156 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2157 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2158 && inputStream->width == newStream->width
2159 && inputStream->height == newStream->height) {
2160 if (zslStream != NULL) {
2161                /* This scenario indicates that multiple YUV streams with the same size
2162                 * as the input stream have been requested. Since the zsl stream handle
2163                 * is solely used to override the size of streams that share h/w
2164                 * streams, we just make a guess here as to which stream is the ZSL
2165                 * stream; this will be refactored once we have generic logic for
2166                 * streams sharing encoder output
2167 */
2168 LOGH("Warning, Multiple ip/reprocess streams requested!");
2169 }
2170 zslStream = newStream;
2171 }
2172 }
2173 }
2174
2175 /* If a zsl stream is set, we know that we have configured at least one input or
2176 bidirectional stream */
2177 if (NULL != zslStream) {
2178 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2179 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2180 mInputStreamInfo.format = zslStream->format;
2181 mInputStreamInfo.usage = zslStream->usage;
2182 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2183 mInputStreamInfo.dim.width,
2184 mInputStreamInfo.dim.height,
2185 mInputStreamInfo.format, mInputStreamInfo.usage);
2186 }
2187
2188 cleanAndSortStreamInfo();
2189 if (mMetadataChannel) {
2190 delete mMetadataChannel;
2191 mMetadataChannel = NULL;
2192 }
2193 if (mSupportChannel) {
2194 delete mSupportChannel;
2195 mSupportChannel = NULL;
2196 }
2197
2198 if (mAnalysisChannel) {
2199 delete mAnalysisChannel;
2200 mAnalysisChannel = NULL;
2201 }
2202
2203 if (mDummyBatchChannel) {
2204 delete mDummyBatchChannel;
2205 mDummyBatchChannel = NULL;
2206 }
2207
Emilian Peev7650c122017-01-19 08:24:33 -08002208 if (mDepthChannel) {
2209 mDepthChannel = NULL;
2210 }
2211
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002212 mShutterDispatcher.clear();
2213 mOutputBufferDispatcher.clear();
2214
Thierry Strudel2896d122017-02-23 19:18:03 -08002215 char is_type_value[PROPERTY_VALUE_MAX];
2216 property_get("persist.camera.is_type", is_type_value, "4");
2217 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2218
Binhao Line406f062017-05-03 14:39:44 -07002219 char property_value[PROPERTY_VALUE_MAX];
2220 property_get("persist.camera.gzoom.at", property_value, "0");
2221 int goog_zoom_at = atoi(property_value);
2222 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2223 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2224
2225 property_get("persist.camera.gzoom.4k", property_value, "0");
2226 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
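    // Illustrative property usage (sketch): persist.camera.gzoom.at is treated
    // as a bitmask above, bit 0 enabling Google zoom for the video stream and
    // bit 1 for the preview stream (e.g. "3" enables both);
    // persist.camera.gzoom.4k additionally permits it for 4K video when non-zero.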
2227
Thierry Strudel3d639192016-09-09 11:52:26 -07002228 //Create metadata channel and initialize it
2229 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2230 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2231 gCamCapability[mCameraId]->color_arrangement);
2232 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2233 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002234 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002235 if (mMetadataChannel == NULL) {
2236 LOGE("failed to allocate metadata channel");
2237 rc = -ENOMEM;
2238 pthread_mutex_unlock(&mMutex);
2239 return rc;
2240 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002241 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002242 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2243 if (rc < 0) {
2244 LOGE("metadata channel initialization failed");
2245 delete mMetadataChannel;
2246 mMetadataChannel = NULL;
2247 pthread_mutex_unlock(&mMutex);
2248 return rc;
2249 }
2250
Thierry Strudel2896d122017-02-23 19:18:03 -08002251 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002252 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002253 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002254    // Keep track of preview/video stream indices.
2255    // There could be more than one preview stream, but only one video stream.
2256 int32_t video_stream_idx = -1;
2257 int32_t preview_stream_idx[streamList->num_streams];
2258 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002259 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2260 /* Allocate channel objects for the requested streams */
2261 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002262
Thierry Strudel3d639192016-09-09 11:52:26 -07002263 camera3_stream_t *newStream = streamList->streams[i];
2264 uint32_t stream_usage = newStream->usage;
2265 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2266 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2267 struct camera_info *p_info = NULL;
2268 pthread_mutex_lock(&gCamLock);
2269 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2270 pthread_mutex_unlock(&gCamLock);
2271 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2272 || IS_USAGE_ZSL(newStream->usage)) &&
2273 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002274 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002275 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002276 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2277 if (bUseCommonFeatureMask)
2278 zsl_ppmask = commonFeatureMask;
2279 else
2280 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002281 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 if (numStreamsOnEncoder > 0)
2283 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2284 else
2285 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002286 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002287 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002288 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002289 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002290 LOGH("Input stream configured, reprocess config");
2291 } else {
2292 //for non zsl streams find out the format
2293 switch (newStream->format) {
2294 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2295 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002296 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2298 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2299 /* add additional features to pp feature mask */
2300 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2301 mStreamConfigInfo.num_streams);
2302
2303 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2304 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2305 CAM_STREAM_TYPE_VIDEO;
2306 if (m_bTnrEnabled && m_bTnrVideo) {
2307 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2308 CAM_QCOM_FEATURE_CPP_TNR;
2309 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2310 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2311 ~CAM_QCOM_FEATURE_CDS;
2312 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002313 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2314 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2315 CAM_QTI_FEATURE_PPEISCORE;
2316 }
Binhao Line406f062017-05-03 14:39:44 -07002317 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2318 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2319 CAM_QCOM_FEATURE_GOOG_ZOOM;
2320 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002321 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 } else {
2323 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2324 CAM_STREAM_TYPE_PREVIEW;
2325 if (m_bTnrEnabled && m_bTnrPreview) {
2326 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2327 CAM_QCOM_FEATURE_CPP_TNR;
2328 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2329 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2330 ~CAM_QCOM_FEATURE_CDS;
2331 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002332 if(!m_bSwTnrPreview) {
2333 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2334 ~CAM_QTI_FEATURE_SW_TNR;
2335 }
Binhao Line406f062017-05-03 14:39:44 -07002336 if (is_goog_zoom_preview_enabled) {
2337 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2338 CAM_QCOM_FEATURE_GOOG_ZOOM;
2339 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002340 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002341 padding_info.width_padding = mSurfaceStridePadding;
2342 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002343 previewSize.width = (int32_t)newStream->width;
2344 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002345 }
2346 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2347 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2348 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2349 newStream->height;
2350 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2351 newStream->width;
2352 }
2353 }
2354 break;
2355 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002356 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002357 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2358 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2359 if (bUseCommonFeatureMask)
2360 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2361 commonFeatureMask;
2362 else
2363 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2364 CAM_QCOM_FEATURE_NONE;
2365 } else {
2366 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2367 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2368 }
2369 break;
2370 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002371 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002372 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2373 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2374 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2375 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2376 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 /* Remove rotation if it is not supported
2378 for 4K LiveVideo snapshot case (online processing) */
2379 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2380 CAM_QCOM_FEATURE_ROTATION)) {
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2382 &= ~CAM_QCOM_FEATURE_ROTATION;
2383 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002384 } else {
2385 if (bUseCommonFeatureMask &&
2386 isOnEncoder(maxViewfinderSize, newStream->width,
2387 newStream->height)) {
2388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2389 } else {
2390 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2391 }
2392 }
2393 if (isZsl) {
2394 if (zslStream) {
2395 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2396 (int32_t)zslStream->width;
2397 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2398 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2400 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002401 } else {
2402 LOGE("Error, No ZSL stream identified");
2403 pthread_mutex_unlock(&mMutex);
2404 return -EINVAL;
2405 }
2406 } else if (m_bIs4KVideo) {
2407 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2408 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2409 } else if (bYuv888OverrideJpeg) {
2410 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2411 (int32_t)largeYuv888Size.width;
2412 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2413 (int32_t)largeYuv888Size.height;
2414 }
2415 break;
2416 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2417 case HAL_PIXEL_FORMAT_RAW16:
2418 case HAL_PIXEL_FORMAT_RAW10:
2419 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2420 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2421 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002422 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2423 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2424 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2425 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2426 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2427 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2428 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2429 gCamCapability[mCameraId]->dt[mPDIndex];
2430 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2431 gCamCapability[mCameraId]->vc[mPDIndex];
2432 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002433 break;
2434 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002435 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2438 break;
2439 }
2440 }
2441
2442 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2443 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2444 gCamCapability[mCameraId]->color_arrangement);
2445
2446 if (newStream->priv == NULL) {
2447 //New stream, construct channel
2448 switch (newStream->stream_type) {
2449 case CAMERA3_STREAM_INPUT:
2450 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2451 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2452 break;
2453 case CAMERA3_STREAM_BIDIRECTIONAL:
2454 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2455 GRALLOC_USAGE_HW_CAMERA_WRITE;
2456 break;
2457 case CAMERA3_STREAM_OUTPUT:
2458            /* For video encoding streams, set the read/write rarely
2459             * flags so that the buffers may be allocated un-cached */
2460 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2461 newStream->usage |=
2462 (GRALLOC_USAGE_SW_READ_RARELY |
2463 GRALLOC_USAGE_SW_WRITE_RARELY |
2464 GRALLOC_USAGE_HW_CAMERA_WRITE);
2465 else if (IS_USAGE_ZSL(newStream->usage))
2466 {
2467 LOGD("ZSL usage flag skipping");
2468 }
2469 else if (newStream == zslStream
2470 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2471 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2472 } else
2473 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2474 break;
2475 default:
2476 LOGE("Invalid stream_type %d", newStream->stream_type);
2477 break;
2478 }
2479
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002480 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002481 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2482 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2483 QCamera3ProcessingChannel *channel = NULL;
2484 switch (newStream->format) {
2485 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2486 if ((newStream->usage &
2487 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2488 (streamList->operation_mode ==
2489 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2490 ) {
2491 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2492 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002493 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002494 this,
2495 newStream,
2496 (cam_stream_type_t)
2497 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2499 mMetadataChannel,
2500 0); //heap buffers are not required for HFR video channel
2501 if (channel == NULL) {
2502 LOGE("allocation of channel failed");
2503 pthread_mutex_unlock(&mMutex);
2504 return -ENOMEM;
2505 }
2506                        //channel->getNumBuffers() will return 0 here so use
2507                        //MAX_INFLIGHT_HFR_REQUESTS
2508 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2509 newStream->priv = channel;
2510 LOGI("num video buffers in HFR mode: %d",
2511 MAX_INFLIGHT_HFR_REQUESTS);
2512 } else {
2513                        /* Copy stream contents in the HFR preview-only case to create a
2514                         * dummy batch channel so that sensor streaming is in
2515                         * HFR mode */
2516 if (!m_bIsVideo && (streamList->operation_mode ==
2517 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2518 mDummyBatchStream = *newStream;
2519 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002520 int bufferCount = MAX_INFLIGHT_REQUESTS;
2521 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2522 CAM_STREAM_TYPE_VIDEO) {
2523 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2524 bufferCount = MAX_VIDEO_BUFFERS;
2525 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002526 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2527 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002528 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002529 this,
2530 newStream,
2531 (cam_stream_type_t)
2532 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2533 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2534 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002535 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002536 if (channel == NULL) {
2537 LOGE("allocation of channel failed");
2538 pthread_mutex_unlock(&mMutex);
2539 return -ENOMEM;
2540 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002541 /* disable UBWC for preview, though supported,
2542 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002543 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002544 (previewSize.width == (int32_t)videoWidth)&&
2545 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002546 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002547 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002548 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002549 /* When goog_zoom is linked to the preview or video stream,
2550                         * disable UBWC for the linked stream */
2551 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2552 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2553 channel->setUBWCEnabled(false);
2554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 newStream->max_buffers = channel->getNumBuffers();
2556 newStream->priv = channel;
2557 }
2558 break;
2559 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2560 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2561 mChannelHandle,
2562 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002563 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 this,
2565 newStream,
2566 (cam_stream_type_t)
2567 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2568 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2569 mMetadataChannel);
2570 if (channel == NULL) {
2571 LOGE("allocation of YUV channel failed");
2572 pthread_mutex_unlock(&mMutex);
2573 return -ENOMEM;
2574 }
2575 newStream->max_buffers = channel->getNumBuffers();
2576 newStream->priv = channel;
2577 break;
2578 }
2579 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2580 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002581 case HAL_PIXEL_FORMAT_RAW10: {
2582 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2583 (HAL_DATASPACE_DEPTH != newStream->data_space))
2584 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002585 mRawChannel = new QCamera3RawChannel(
2586 mCameraHandle->camera_handle, mChannelHandle,
2587 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002588 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002589 this, newStream,
2590 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002591 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002592 if (mRawChannel == NULL) {
2593 LOGE("allocation of raw channel failed");
2594 pthread_mutex_unlock(&mMutex);
2595 return -ENOMEM;
2596 }
2597 newStream->max_buffers = mRawChannel->getNumBuffers();
2598 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2599 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002600 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002601 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002602 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2603 mDepthChannel = new QCamera3DepthChannel(
2604 mCameraHandle->camera_handle, mChannelHandle,
2605 mCameraHandle->ops, NULL, NULL, &padding_info,
2606 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2607 mMetadataChannel);
2608 if (NULL == mDepthChannel) {
2609 LOGE("Allocation of depth channel failed");
2610 pthread_mutex_unlock(&mMutex);
2611 return NO_MEMORY;
2612 }
2613 newStream->priv = mDepthChannel;
2614 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2615 } else {
2616 // Max live snapshot inflight buffer is 1. This is to mitigate
2617 // frame drop issues for video snapshot. The more buffers being
2618 // allocated, the more frame drops there are.
2619 mPictureChannel = new QCamera3PicChannel(
2620 mCameraHandle->camera_handle, mChannelHandle,
2621 mCameraHandle->ops, captureResultCb,
2622 setBufferErrorStatus, &padding_info, this, newStream,
2623 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2624 m_bIs4KVideo, isZsl, mMetadataChannel,
2625 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2626 if (mPictureChannel == NULL) {
2627 LOGE("allocation of channel failed");
2628 pthread_mutex_unlock(&mMutex);
2629 return -ENOMEM;
2630 }
2631 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2632 newStream->max_buffers = mPictureChannel->getNumBuffers();
2633 mPictureChannel->overrideYuvSize(
2634 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2635 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002637 break;
2638
2639 default:
2640 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002641 pthread_mutex_unlock(&mMutex);
2642 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 }
2644 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2645 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2646 } else {
2647 LOGE("Error, Unknown stream type");
2648 pthread_mutex_unlock(&mMutex);
2649 return -EINVAL;
2650 }
2651
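 // Note (descriptive): when UBWC is enabled, streams whose default format resolves
 // to CAM_FORMAT_YUV_420_NV12_UBWC get the private gralloc usage bit below so the
 // allocator can pick the compressed UBWC layout for their buffers.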
2652 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002653 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2654 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002655 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002656 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002657 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2658 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2659 }
2660 }
2661
2662 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2663 it != mStreamInfo.end(); it++) {
2664 if ((*it)->stream == newStream) {
2665 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2666 break;
2667 }
2668 }
2669 } else {
2670 // Channel already exists for this stream
2671 // Do nothing for now
2672 }
2673 padding_info = gCamCapability[mCameraId]->padding_info;
2674
Emilian Peev7650c122017-01-19 08:24:33 -08002675 /* Do not add entries for the input & depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 * since there is no real camera stream associated with them
2677 */
Emilian Peev7650c122017-01-19 08:24:33 -08002678 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002679 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2680 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002681 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002682 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002683 }
2684
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002685 // Let buffer dispatcher know the configured streams.
2686 mOutputBufferDispatcher.configureStreams(streamList);
2687
Binhao Lincdb362a2017-04-20 13:31:54 -07002688 // By default, preview stream TNR is disabled.
2689 // Enable TNR to the preview stream if all conditions below are satisfied:
2690 // 1. video resolution <= 1080p.
2691 // 2. preview resolution == video resolution.
2692 // 3. video stream TNR is enabled.
2693 // 4. EIS 2.0 is in use.
2694 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2695 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2696 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2697 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2698 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2699 video_stream->width == preview_stream->width &&
2700 video_stream->height == preview_stream->height) {
2701 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2702 CAM_QCOM_FEATURE_CPP_TNR;
2703 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2704 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2705 ~CAM_QCOM_FEATURE_CDS;
2706 }
2707 }
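 // Net effect (illustrative): a 1080p preview paired with a TNR-enabled 1080p video
 // stream under EIS 2.0 ends up with CAM_QCOM_FEATURE_CPP_TNR set and
 // CAM_QCOM_FEATURE_CDS cleared in its postprocess mask.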
2708
Thierry Strudel2896d122017-02-23 19:18:03 -08002709 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2710 onlyRaw = false;
2711 }
2712
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002713 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002714 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002715 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002716 cam_analysis_info_t analysisInfo;
2717 int32_t ret = NO_ERROR;
2718 ret = mCommon.getAnalysisInfo(
2719 FALSE,
2720 analysisFeatureMask,
2721 &analysisInfo);
2722 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002723 cam_color_filter_arrangement_t analysis_color_arrangement =
2724 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2725 CAM_FILTER_ARRANGEMENT_Y :
2726 gCamCapability[mCameraId]->color_arrangement);
2727 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2728 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002729 cam_dimension_t analysisDim;
2730 analysisDim = mCommon.getMatchingDimension(previewSize,
2731 analysisInfo.analysis_recommended_res);
2732
2733 mAnalysisChannel = new QCamera3SupportChannel(
2734 mCameraHandle->camera_handle,
2735 mChannelHandle,
2736 mCameraHandle->ops,
2737 &analysisInfo.analysis_padding_info,
2738 analysisFeatureMask,
2739 CAM_STREAM_TYPE_ANALYSIS,
2740 &analysisDim,
2741 (analysisInfo.analysis_format
2742 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2743 : CAM_FORMAT_YUV_420_NV21),
2744 analysisInfo.hw_analysis_supported,
2745 gCamCapability[mCameraId]->color_arrangement,
2746 this,
2747 0); // force buffer count to 0
2748 } else {
2749 LOGW("getAnalysisInfo failed, ret = %d", ret);
2750 }
2751 if (!mAnalysisChannel) {
2752 LOGW("Analysis channel cannot be created");
2753 }
2754 }
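 // Note (descriptive): mAnalysisChannel may legitimately remain NULL here (RAW-only
 // mode or getAnalysisInfo failure); the meta stream bookkeeping further below only
 // runs when the channel was actually created.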
2755
Thierry Strudel3d639192016-09-09 11:52:26 -07002756 //RAW DUMP channel
2757 if (mEnableRawDump && isRawStreamRequested == false){
2758 cam_dimension_t rawDumpSize;
2759 rawDumpSize = getMaxRawSize(mCameraId);
2760 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2761 setPAAFSupport(rawDumpFeatureMask,
2762 CAM_STREAM_TYPE_RAW,
2763 gCamCapability[mCameraId]->color_arrangement);
2764 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2765 mChannelHandle,
2766 mCameraHandle->ops,
2767 rawDumpSize,
2768 &padding_info,
2769 this, rawDumpFeatureMask);
2770 if (!mRawDumpChannel) {
2771 LOGE("Raw Dump channel cannot be created");
2772 pthread_mutex_unlock(&mMutex);
2773 return -ENOMEM;
2774 }
2775 }
2776
Thierry Strudel3d639192016-09-09 11:52:26 -07002777 if (mAnalysisChannel) {
2778 cam_analysis_info_t analysisInfo;
2779 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2780 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2781 CAM_STREAM_TYPE_ANALYSIS;
2782 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2783 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002784 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2786 &analysisInfo);
2787 if (rc != NO_ERROR) {
2788 LOGE("getAnalysisInfo failed, ret = %d", rc);
2789 pthread_mutex_unlock(&mMutex);
2790 return rc;
2791 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002792 cam_color_filter_arrangement_t analysis_color_arrangement =
2793 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2794 CAM_FILTER_ARRANGEMENT_Y :
2795 gCamCapability[mCameraId]->color_arrangement);
2796 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2797 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2798 analysis_color_arrangement);
2799
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002801 mCommon.getMatchingDimension(previewSize,
2802 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002803 mStreamConfigInfo.num_streams++;
2804 }
2805
Thierry Strudel2896d122017-02-23 19:18:03 -08002806 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002807 cam_analysis_info_t supportInfo;
2808 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2809 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2810 setPAAFSupport(callbackFeatureMask,
2811 CAM_STREAM_TYPE_CALLBACK,
2812 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002813 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002814 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002815 if (ret != NO_ERROR) {
2816 /* Ignore the error for Mono camera
2817 * because the PAAF bit mask is only set
2818 * for CAM_STREAM_TYPE_ANALYSIS stream type
2819 */
2820 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2821 LOGW("getAnalysisInfo failed, ret = %d", ret);
2822 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 }
2824 mSupportChannel = new QCamera3SupportChannel(
2825 mCameraHandle->camera_handle,
2826 mChannelHandle,
2827 mCameraHandle->ops,
2828 &gCamCapability[mCameraId]->padding_info,
2829 callbackFeatureMask,
2830 CAM_STREAM_TYPE_CALLBACK,
2831 &QCamera3SupportChannel::kDim,
2832 CAM_FORMAT_YUV_420_NV21,
2833 supportInfo.hw_analysis_supported,
2834 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002835 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002836 if (!mSupportChannel) {
2837 LOGE("dummy channel cannot be created");
2838 pthread_mutex_unlock(&mMutex);
2839 return -ENOMEM;
2840 }
2841 }
2842
2843 if (mSupportChannel) {
2844 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2845 QCamera3SupportChannel::kDim;
2846 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2847 CAM_STREAM_TYPE_CALLBACK;
2848 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2849 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2850 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2851 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2852 gCamCapability[mCameraId]->color_arrangement);
2853 mStreamConfigInfo.num_streams++;
2854 }
2855
2856 if (mRawDumpChannel) {
2857 cam_dimension_t rawSize;
2858 rawSize = getMaxRawSize(mCameraId);
2859 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2860 rawSize;
2861 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2862 CAM_STREAM_TYPE_RAW;
2863 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2864 CAM_QCOM_FEATURE_NONE;
2865 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2866 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2867 gCamCapability[mCameraId]->color_arrangement);
2868 mStreamConfigInfo.num_streams++;
2869 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002870
2871 if (mHdrPlusRawSrcChannel) {
2872 cam_dimension_t rawSize;
2873 rawSize = getMaxRawSize(mCameraId);
2874 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2875 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2876 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2877 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2878 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2879 gCamCapability[mCameraId]->color_arrangement);
2880 mStreamConfigInfo.num_streams++;
2881 }
2882
Thierry Strudel3d639192016-09-09 11:52:26 -07002883 /* In HFR mode, if video stream is not added, create a dummy channel so that
2884 * ISP can create a batch mode even for the preview-only case. This channel is
2885 * never 'start'ed (no stream-on), it is only 'initialized' */
2886 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2887 !m_bIsVideo) {
2888 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2889 setPAAFSupport(dummyFeatureMask,
2890 CAM_STREAM_TYPE_VIDEO,
2891 gCamCapability[mCameraId]->color_arrangement);
2892 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2893 mChannelHandle,
2894 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002895 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002896 this,
2897 &mDummyBatchStream,
2898 CAM_STREAM_TYPE_VIDEO,
2899 dummyFeatureMask,
2900 mMetadataChannel);
2901 if (NULL == mDummyBatchChannel) {
2902 LOGE("creation of mDummyBatchChannel failed."
2903 "Preview will use non-hfr sensor mode ");
2904 }
2905 }
2906 if (mDummyBatchChannel) {
2907 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2908 mDummyBatchStream.width;
2909 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2910 mDummyBatchStream.height;
2911 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2912 CAM_STREAM_TYPE_VIDEO;
2913 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2914 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2915 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2916 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2917 gCamCapability[mCameraId]->color_arrangement);
2918 mStreamConfigInfo.num_streams++;
2919 }
2920
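 // Note (descriptive): the nested ternary below resolves as
 // m_bIs4KVideo ? 0 : (m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS).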
2921 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2922 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002923 m_bIs4KVideo ? 0 :
2924 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002925
2926 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2927 for (pendingRequestIterator i = mPendingRequestsList.begin();
2928 i != mPendingRequestsList.end();) {
2929 i = erasePendingRequest(i);
2930 }
2931 mPendingFrameDropList.clear();
2932 // Initialize/Reset the pending buffers list
2933 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2934 req.mPendingBufferList.clear();
2935 }
2936 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2937
Thierry Strudel3d639192016-09-09 11:52:26 -07002938 mCurJpegMeta.clear();
2939 //Get min frame duration for this streams configuration
2940 deriveMinFrameDuration();
2941
Chien-Yu Chenee335912017-02-09 17:53:20 -08002942 mFirstPreviewIntentSeen = false;
2943
2944 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002945 {
2946 Mutex::Autolock l(gHdrPlusClientLock);
2947 disableHdrPlusModeLocked();
2948 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002949
Thierry Strudel3d639192016-09-09 11:52:26 -07002950 // Update state
2951 mState = CONFIGURED;
2952
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002953 mFirstMetadataCallback = true;
2954
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 pthread_mutex_unlock(&mMutex);
2956
2957 return rc;
2958}
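/* Illustrative sketch (assumption: a framework-style caller, not code in this HAL):
 * a minimal two-stream configuration -- one YUV preview stream plus one BLOB (JPEG)
 * stream -- of the kind the stream configuration path above is written to accept.
 * Field names follow the camera3 HAL definitions already used in this file; stream
 * sizes are example values only.
 *
 *   camera3_stream_t preview = {};
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.width       = 1920;
 *   preview.height      = 1080;
 *   preview.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *
 *   camera3_stream_t jpeg = {};
 *   jpeg.stream_type = CAMERA3_STREAM_OUTPUT;
 *   jpeg.width       = 4032;
 *   jpeg.height      = 3024;
 *   jpeg.format      = HAL_PIXEL_FORMAT_BLOB;
 *
 *   camera3_stream_t *streams[] = { &preview, &jpeg };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams    = 2;
 *   config.streams        = streams;
 *   config.operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
 *   // config is then handed to the HAL's configure_streams() entry point.
 */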
2959
2960/*===========================================================================
2961 * FUNCTION : validateCaptureRequest
2962 *
2963 * DESCRIPTION: validate a capture request from camera service
2964 *
2965 * PARAMETERS :
2966 * @request : request from framework to process
2967 *
2968 * RETURN :
2969 *
2970 *==========================================================================*/
2971int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002972 camera3_capture_request_t *request,
2973 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002974{
2975 ssize_t idx = 0;
2976 const camera3_stream_buffer_t *b;
2977 CameraMetadata meta;
2978
2979 /* Sanity check the request */
2980 if (request == NULL) {
2981 LOGE("NULL capture request");
2982 return BAD_VALUE;
2983 }
2984
2985 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2986 /*settings cannot be null for the first request*/
2987 return BAD_VALUE;
2988 }
2989
2990 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002991 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2992 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002993 LOGE("Request %d: No output buffers provided!",
2994 frameNumber);
2995 return BAD_VALUE;
2996 }
2997 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2998 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2999 request->num_output_buffers, MAX_NUM_STREAMS);
3000 return BAD_VALUE;
3001 }
3002 if (request->input_buffer != NULL) {
3003 b = request->input_buffer;
3004 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3005 LOGE("Request %d: Buffer %ld: Status not OK!",
3006 frameNumber, (long)idx);
3007 return BAD_VALUE;
3008 }
3009 if (b->release_fence != -1) {
3010 LOGE("Request %d: Buffer %ld: Has a release fence!",
3011 frameNumber, (long)idx);
3012 return BAD_VALUE;
3013 }
3014 if (b->buffer == NULL) {
3015 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3016 frameNumber, (long)idx);
3017 return BAD_VALUE;
3018 }
3019 }
3020
3021 // Validate all buffers
3022 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003023 if (b == NULL) {
3024 return BAD_VALUE;
3025 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003026 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003027 QCamera3ProcessingChannel *channel =
3028 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3029 if (channel == NULL) {
3030 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3031 frameNumber, (long)idx);
3032 return BAD_VALUE;
3033 }
3034 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3035 LOGE("Request %d: Buffer %ld: Status not OK!",
3036 frameNumber, (long)idx);
3037 return BAD_VALUE;
3038 }
3039 if (b->release_fence != -1) {
3040 LOGE("Request %d: Buffer %ld: Has a release fence!",
3041 frameNumber, (long)idx);
3042 return BAD_VALUE;
3043 }
3044 if (b->buffer == NULL) {
3045 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3046 frameNumber, (long)idx);
3047 return BAD_VALUE;
3048 }
3049 if (*(b->buffer) == NULL) {
3050 LOGE("Request %d: Buffer %ld: NULL private handle!",
3051 frameNumber, (long)idx);
3052 return BAD_VALUE;
3053 }
3054 idx++;
3055 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003056 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003057 return NO_ERROR;
3058}
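/* Illustrative sketch (assumption: a framework-style caller; `preview`, `bufferHandle`
 * and `defaultSettings` are hypothetical placeholders): the smallest request that
 * passes the checks above -- non-NULL settings while mState == CONFIGURED, at least
 * one output buffer, a release fence of -1, and a non-NULL buffer handle.
 *
 *   camera3_stream_buffer_t out = {};
 *   out.stream        = &preview;         // a stream configured earlier
 *   out.buffer        = &bufferHandle;    // non-NULL buffer_handle_t*
 *   out.acquire_fence = -1;
 *   out.release_fence = -1;               // must be -1, see the checks above
 *   out.status        = CAMERA3_BUFFER_STATUS_OK;
 *
 *   camera3_capture_request_t request = {};
 *   request.frame_number       = 0;
 *   request.settings           = defaultSettings;  // required for the first request
 *   request.input_buffer       = NULL;
 *   request.num_output_buffers = 1;
 *   request.output_buffers     = &out;
 */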
3059
3060/*===========================================================================
3061 * FUNCTION : deriveMinFrameDuration
3062 *
3063 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3064 * on currently configured streams.
3065 *
3066 * PARAMETERS : NONE
3067 *
3068 * RETURN : NONE
3069 *
3070 *==========================================================================*/
3071void QCamera3HardwareInterface::deriveMinFrameDuration()
3072{
3073 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3074
3075 maxJpegDim = 0;
3076 maxProcessedDim = 0;
3077 maxRawDim = 0;
3078
3079 // Figure out maximum jpeg, processed, and raw dimensions
3080 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3081 it != mStreamInfo.end(); it++) {
3082
3083 // Input stream doesn't have valid stream_type
3084 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3085 continue;
3086
3087 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3088 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3089 if (dimension > maxJpegDim)
3090 maxJpegDim = dimension;
3091 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3092 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3093 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3094 if (dimension > maxRawDim)
3095 maxRawDim = dimension;
3096 } else {
3097 if (dimension > maxProcessedDim)
3098 maxProcessedDim = dimension;
3099 }
3100 }
3101
3102 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3103 MAX_SIZES_CNT);
3104
3105 //Assume all jpeg dimensions are in processed dimensions.
3106 if (maxJpegDim > maxProcessedDim)
3107 maxProcessedDim = maxJpegDim;
3108 //Find the smallest raw dimension that is greater than or equal to the max processed/jpeg dimension
3109 if (maxProcessedDim > maxRawDim) {
3110 maxRawDim = INT32_MAX;
3111
3112 for (size_t i = 0; i < count; i++) {
3113 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3114 gCamCapability[mCameraId]->raw_dim[i].height;
3115 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3116 maxRawDim = dimension;
3117 }
3118 }
3119
3120 //Find minimum durations for processed, jpeg, and raw
3121 for (size_t i = 0; i < count; i++) {
3122 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3123 gCamCapability[mCameraId]->raw_dim[i].height) {
3124 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3125 break;
3126 }
3127 }
3128 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3129 for (size_t i = 0; i < count; i++) {
3130 if (maxProcessedDim ==
3131 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3132 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3133 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3134 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3135 break;
3136 }
3137 }
3138}
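/* Worked example (illustrative numbers): with a 1920x1080 processed stream and a
 * 4032x3024 BLOB stream configured, maxJpegDim (4032*3024) exceeds maxProcessedDim,
 * so maxProcessedDim is promoted to the JPEG size. If no RAW stream is configured,
 * the loop above then picks the smallest advertised RAW dimension that is >= that
 * value, and the per-class minimum durations are read from the matching entries of
 * raw_min_duration[] and picture_min_duration[]. */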
3139
3140/*===========================================================================
3141 * FUNCTION : getMinFrameDuration
3142 *
3143 * DESCRIPTION: get minimum frame duration based on the per-stream minimum frame durations
3144 * and current request configuration.
3145 *
3146 * PARAMETERS : @request: request sent by the framework
3147 *
3148 * RETURN : minimum frame duration for a particular request
3149 *
3150 *==========================================================================*/
3151int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3152{
3153 bool hasJpegStream = false;
3154 bool hasRawStream = false;
3155 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3156 const camera3_stream_t *stream = request->output_buffers[i].stream;
3157 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3158 hasJpegStream = true;
3159 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3160 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3161 stream->format == HAL_PIXEL_FORMAT_RAW16)
3162 hasRawStream = true;
3163 }
3164
3165 if (!hasJpegStream)
3166 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3167 else
3168 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3169}
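// Example (descriptive): a request containing only processed (non-RAW, non-BLOB)
// buffers still returns MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
// adding a BLOB buffer to the request additionally folds in mMinJpegFrameDuration.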
3170
3171/*===========================================================================
3172 * FUNCTION : handleBuffersDuringFlushLock
3173 *
3174 * DESCRIPTION: Account for buffers returned from back-end during flush
3175 * This function is executed while mMutex is held by the caller.
3176 *
3177 * PARAMETERS :
3178 * @buffer: image buffer for the callback
3179 *
3180 * RETURN :
3181 *==========================================================================*/
3182void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3183{
3184 bool buffer_found = false;
3185 for (List<PendingBuffersInRequest>::iterator req =
3186 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3187 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3188 for (List<PendingBufferInfo>::iterator i =
3189 req->mPendingBufferList.begin();
3190 i != req->mPendingBufferList.end(); i++) {
3191 if (i->buffer == buffer->buffer) {
3192 mPendingBuffersMap.numPendingBufsAtFlush--;
3193 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3194 buffer->buffer, req->frame_number,
3195 mPendingBuffersMap.numPendingBufsAtFlush);
3196 buffer_found = true;
3197 break;
3198 }
3199 }
3200 if (buffer_found) {
3201 break;
3202 }
3203 }
3204 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3205 //signal the flush()
3206 LOGD("All buffers returned to HAL. Continue flush");
3207 pthread_cond_signal(&mBuffersCond);
3208 }
3209}
3210
Thierry Strudel3d639192016-09-09 11:52:26 -07003211/*===========================================================================
3212 * FUNCTION : handleBatchMetadata
3213 *
3214 * DESCRIPTION: Handles metadata buffer callback in batch mode
3215 *
3216 * PARAMETERS : @metadata_buf: metadata buffer
3217 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3218 * the meta buf in this method
3219 *
3220 * RETURN :
3221 *
3222 *==========================================================================*/
3223void QCamera3HardwareInterface::handleBatchMetadata(
3224 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3225{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003226 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003227
3228 if (NULL == metadata_buf) {
3229 LOGE("metadata_buf is NULL");
3230 return;
3231 }
3232 /* In batch mode, the metadata will contain the frame number and timestamp of
3233 * the last frame in the batch. Eg: a batch containing buffers from request
3234 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3235 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3236 * multiple process_capture_results */
3237 metadata_buffer_t *metadata =
3238 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3239 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3240 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3241 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3242 uint32_t frame_number = 0, urgent_frame_number = 0;
3243 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3244 bool invalid_metadata = false;
3245 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3246 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003247 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003248
3249 int32_t *p_frame_number_valid =
3250 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3251 uint32_t *p_frame_number =
3252 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3253 int64_t *p_capture_time =
3254 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3255 int32_t *p_urgent_frame_number_valid =
3256 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3257 uint32_t *p_urgent_frame_number =
3258 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3259
3260 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3261 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3262 (NULL == p_urgent_frame_number)) {
3263 LOGE("Invalid metadata");
3264 invalid_metadata = true;
3265 } else {
3266 frame_number_valid = *p_frame_number_valid;
3267 last_frame_number = *p_frame_number;
3268 last_frame_capture_time = *p_capture_time;
3269 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3270 last_urgent_frame_number = *p_urgent_frame_number;
3271 }
3272
3273 /* In batch mode, when no video buffers are requested, set_parms are sent
3274 * for every capture_request. The difference between consecutive urgent
3275 * frame numbers and frame numbers should be used to interpolate the
3276 * corresponding frame numbers and time stamps */
3277 pthread_mutex_lock(&mMutex);
3278 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003279 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3280 if(idx < 0) {
3281 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3282 last_urgent_frame_number);
3283 mState = ERROR;
3284 pthread_mutex_unlock(&mMutex);
3285 return;
3286 }
3287 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003288 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3289 first_urgent_frame_number;
3290
3291 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3292 urgent_frame_number_valid,
3293 first_urgent_frame_number, last_urgent_frame_number);
3294 }
3295
3296 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003297 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3298 if(idx < 0) {
3299 LOGE("Invalid frame number received: %d. Irrecoverable error",
3300 last_frame_number);
3301 mState = ERROR;
3302 pthread_mutex_unlock(&mMutex);
3303 return;
3304 }
3305 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003306 frameNumDiff = last_frame_number + 1 -
3307 first_frame_number;
3308 mPendingBatchMap.removeItem(last_frame_number);
3309
3310 LOGD("frm: valid: %d frm_num: %d - %d",
3311 frame_number_valid,
3312 first_frame_number, last_frame_number);
3313
3314 }
3315 pthread_mutex_unlock(&mMutex);
3316
3317 if (urgent_frame_number_valid || frame_number_valid) {
3318 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3319 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3320 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3321 urgentFrameNumDiff, last_urgent_frame_number);
3322 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3323 LOGE("frameNumDiff: %d frameNum: %d",
3324 frameNumDiff, last_frame_number);
3325 }
3326
3327 for (size_t i = 0; i < loopCount; i++) {
3328 /* handleMetadataWithLock is called even for invalid_metadata for
3329 * pipeline depth calculation */
3330 if (!invalid_metadata) {
3331 /* Infer frame number. Batch metadata contains frame number of the
3332 * last frame */
3333 if (urgent_frame_number_valid) {
3334 if (i < urgentFrameNumDiff) {
3335 urgent_frame_number =
3336 first_urgent_frame_number + i;
3337 LOGD("inferred urgent frame_number: %d",
3338 urgent_frame_number);
3339 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3340 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3341 } else {
3342 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3343 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3344 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3345 }
3346 }
3347
3348 /* Infer frame number. Batch metadata contains frame number of the
3349 * last frame */
3350 if (frame_number_valid) {
3351 if (i < frameNumDiff) {
3352 frame_number = first_frame_number + i;
3353 LOGD("inferred frame_number: %d", frame_number);
3354 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3355 CAM_INTF_META_FRAME_NUMBER, frame_number);
3356 } else {
3357 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3358 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3359 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3360 }
3361 }
3362
3363 if (last_frame_capture_time) {
3364 //Infer timestamp
3365 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003366 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003367 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003368 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003369 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3370 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3371 LOGD("batch capture_time: %lld, capture_time: %lld",
3372 last_frame_capture_time, capture_time);
3373 }
3374 }
3375 pthread_mutex_lock(&mMutex);
3376 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003377 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003378 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3379 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003380 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 pthread_mutex_unlock(&mMutex);
3382 }
3383
3384 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003385 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003386 mMetadataChannel->bufDone(metadata_buf);
3387 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003388 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003389 }
3390}
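// Worked example (illustrative): for a batch built from requests 5..8, the batch
// metadata reports last_frame_number = 8 and first_frame_number resolves to 5, so
// the loop above runs four times, stamping inferred frame numbers 5, 6, 7, 8 and
// timestamps spaced NSEC_PER_SEC / mHFRVideoFps apart, ending at the reported
// last_frame_capture_time.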
3391
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003392void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3393 camera3_error_msg_code_t errorCode)
3394{
3395 camera3_notify_msg_t notify_msg;
3396 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3397 notify_msg.type = CAMERA3_MSG_ERROR;
3398 notify_msg.message.error.error_code = errorCode;
3399 notify_msg.message.error.error_stream = NULL;
3400 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003401 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003402
3403 return;
3404}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003405
3406/*===========================================================================
3407 * FUNCTION : sendPartialMetadataWithLock
3408 *
3409 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3410 *
3411 * PARAMETERS : @metadata: metadata buffer
3412 * @requestIter: The iterator for the pending capture request for
3413 * which the partial result is being sent
3414 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3415 * last urgent metadata in a batch. Always true for non-batch mode
3416 *
3417 * RETURN :
3418 *
3419 *==========================================================================*/
3420
3421void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3422 metadata_buffer_t *metadata,
3423 const pendingRequestIterator requestIter,
3424 bool lastUrgentMetadataInBatch)
3425{
3426 camera3_capture_result_t result;
3427 memset(&result, 0, sizeof(camera3_capture_result_t));
3428
3429 requestIter->partial_result_cnt++;
3430
3431 // Extract 3A metadata
3432 result.result = translateCbUrgentMetadataToResultMetadata(
3433 metadata, lastUrgentMetadataInBatch);
3434 // Populate metadata result
3435 result.frame_number = requestIter->frame_number;
3436 result.num_output_buffers = 0;
3437 result.output_buffers = NULL;
3438 result.partial_result = requestIter->partial_result_cnt;
3439
3440 {
3441 Mutex::Autolock l(gHdrPlusClientLock);
3442 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3443 // Notify HDR+ client about the partial metadata.
3444 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3445 result.partial_result == PARTIAL_RESULT_COUNT);
3446 }
3447 }
3448
3449 orchestrateResult(&result);
3450 LOGD("urgent frame_number = %u", result.frame_number);
3451 free_camera_metadata((camera_metadata_t *)result.result);
3452}
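// Note (descriptive): partial_result_cnt is incremented before the callback above,
// so the framework observes monotonically increasing partial results; the HDR+
// client is additionally told whether this partial completes the expected
// PARTIAL_RESULT_COUNT results for the frame.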
3453
Thierry Strudel3d639192016-09-09 11:52:26 -07003454/*===========================================================================
3455 * FUNCTION : handleMetadataWithLock
3456 *
3457 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3458 *
3459 * PARAMETERS : @metadata_buf: metadata buffer
3460 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3461 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003462 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3463 * last urgent metadata in a batch. Always true for non-batch mode
3464 * @lastMetadataInBatch: Boolean to indicate whether this is the
3465 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003466 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3467 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003468 *
3469 * RETURN :
3470 *
3471 *==========================================================================*/
3472void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003473 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003474 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3475 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003476{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003477 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3479 //during flush do not send metadata from this thread
3480 LOGD("not sending metadata during flush or when mState is error");
3481 if (free_and_bufdone_meta_buf) {
3482 mMetadataChannel->bufDone(metadata_buf);
3483 free(metadata_buf);
3484 }
3485 return;
3486 }
3487
3488 //not in flush
3489 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3490 int32_t frame_number_valid, urgent_frame_number_valid;
3491 uint32_t frame_number, urgent_frame_number;
3492 int64_t capture_time;
3493 nsecs_t currentSysTime;
3494
3495 int32_t *p_frame_number_valid =
3496 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3497 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3498 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3499 int32_t *p_urgent_frame_number_valid =
3500 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3501 uint32_t *p_urgent_frame_number =
3502 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3503 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3504 metadata) {
3505 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3506 *p_frame_number_valid, *p_frame_number);
3507 }
3508
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003509 camera_metadata_t *resultMetadata = nullptr;
3510
Thierry Strudel3d639192016-09-09 11:52:26 -07003511 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3512 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3513 LOGE("Invalid metadata");
3514 if (free_and_bufdone_meta_buf) {
3515 mMetadataChannel->bufDone(metadata_buf);
3516 free(metadata_buf);
3517 }
3518 goto done_metadata;
3519 }
3520 frame_number_valid = *p_frame_number_valid;
3521 frame_number = *p_frame_number;
3522 capture_time = *p_capture_time;
3523 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3524 urgent_frame_number = *p_urgent_frame_number;
3525 currentSysTime = systemTime(CLOCK_MONOTONIC);
3526
3527 // Detect if buffers from any requests are overdue
3528 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003529 int64_t timeout;
3530 {
3531 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3532 // If there is a pending HDR+ request, the following requests may be blocked until the
3533 // HDR+ request is done. So allow a longer timeout.
3534 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3535 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3536 }
3537
3538 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003539 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003540 assert(missed.stream->priv);
3541 if (missed.stream->priv) {
3542 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3543 assert(ch->mStreams[0]);
3544 if (ch->mStreams[0]) {
3545 LOGE("Cancel missing frame = %d, buffer = %p,"
3546 "stream type = %d, stream format = %d",
3547 req.frame_number, missed.buffer,
3548 ch->mStreams[0]->getMyType(), missed.stream->format);
3549 ch->timeoutFrame(req.frame_number);
3550 }
3551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003552 }
3553 }
3554 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003555 //For the very first metadata callback, regardless of whether it contains a valid
3556 //frame number, send the partial metadata for the jumpstarting requests.
3557 //Note that this has to be done even if the metadata doesn't contain a valid
3558 //urgent frame number, because when only 1 request is ever submitted
3559 //to the HAL, there won't be a subsequent valid urgent frame number.
3560 if (mFirstMetadataCallback) {
3561 for (pendingRequestIterator i =
3562 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3563 if (i->bUseFirstPartial) {
3564 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3565 }
3566 }
3567 mFirstMetadataCallback = false;
3568 }
3569
Thierry Strudel3d639192016-09-09 11:52:26 -07003570 //Partial result on process_capture_result for timestamp
3571 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003572 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003573
3574 //Received an urgent frame number, handle it
3575 //using partial results
3576 for (pendingRequestIterator i =
3577 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3578 LOGD("Iterator Frame = %d urgent frame = %d",
3579 i->frame_number, urgent_frame_number);
3580
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003581 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 (i->partial_result_cnt == 0)) {
3583 LOGE("Error: HAL missed urgent metadata for frame number %d",
3584 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003585 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 }
3587
3588 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003589 i->partial_result_cnt == 0) {
3590 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003591 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3592 // Instant AEC settled for this frame.
3593 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3594 mInstantAECSettledFrameNumber = urgent_frame_number;
3595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003596 break;
3597 }
3598 }
3599 }
3600
3601 if (!frame_number_valid) {
3602 LOGD("Not a valid normal frame number, used as SOF only");
3603 if (free_and_bufdone_meta_buf) {
3604 mMetadataChannel->bufDone(metadata_buf);
3605 free(metadata_buf);
3606 }
3607 goto done_metadata;
3608 }
3609 LOGH("valid frame_number = %u, capture_time = %lld",
3610 frame_number, capture_time);
3611
Emilian Peev7650c122017-01-19 08:24:33 -08003612 if (metadata->is_depth_data_valid) {
3613 handleDepthDataLocked(metadata->depth_data, frame_number);
3614 }
3615
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003616 // Check whether any stream buffer corresponding to this is dropped or not
3617 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3618 // OR check if instant AEC is enabled; in that case drop frames until AEC is settled.
3619 for (auto & pendingRequest : mPendingRequestsList) {
3620 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3621 mInstantAECSettledFrameNumber)) {
3622 camera3_notify_msg_t notify_msg = {};
3623 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003624 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003625 QCamera3ProcessingChannel *channel =
3626 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003627 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003628 if (p_cam_frame_drop) {
3629 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003630 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003631 // Got the stream ID for drop frame.
3632 dropFrame = true;
3633 break;
3634 }
3635 }
3636 } else {
3637 // This is instant AEC case.
3638 // For instant AEC, drop the stream until AEC is settled.
3639 dropFrame = true;
3640 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003641
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003642 if (dropFrame) {
3643 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3644 if (p_cam_frame_drop) {
3645 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003646 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003647 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003648 } else {
3649 // For instant AEC, inform frame drop and frame number
3650 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3651 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003652 pendingRequest.frame_number, streamID,
3653 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003654 }
3655 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003656 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003657 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003658 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003659 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003660 if (p_cam_frame_drop) {
3661 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003662 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003664 } else {
3665 // For instant AEC, inform frame drop and frame number
3666 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3667 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 pendingRequest.frame_number, streamID,
3669 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003670 }
3671 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003672 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003673 PendingFrameDrop.stream_ID = streamID;
3674 // Add the Frame drop info to mPendingFrameDropList
3675 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003677 }
3678 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003679 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003680
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003681 for (auto & pendingRequest : mPendingRequestsList) {
3682 // Find the pending request with the frame number.
3683 if (pendingRequest.frame_number == frame_number) {
3684 // Update the sensor timestamp.
3685 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003686
Thierry Strudel3d639192016-09-09 11:52:26 -07003687
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003688 /* Set the timestamp in display metadata so that clients aware of
3689 private_handle such as VT can use these unmodified timestamps.
3690 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003691 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003692
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 // Find channel requiring metadata, meaning internal offline postprocess
3694 // is needed.
3695 //TODO: for now, we don't support two streams requiring metadata at the same time.
3696 // (because we are not making copies, and the metadata buffer is not reference counted.)
3697 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003698 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3699 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003700 if (iter->need_metadata) {
3701 internalPproc = true;
3702 QCamera3ProcessingChannel *channel =
3703 (QCamera3ProcessingChannel *)iter->stream->priv;
3704 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003705 if(p_is_metabuf_queued != NULL) {
3706 *p_is_metabuf_queued = true;
3707 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003708 break;
3709 }
3710 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003711 for (auto itr = pendingRequest.internalRequestList.begin();
3712 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003713 if (itr->need_metadata) {
3714 internalPproc = true;
3715 QCamera3ProcessingChannel *channel =
3716 (QCamera3ProcessingChannel *)itr->stream->priv;
3717 channel->queueReprocMetadata(metadata_buf);
3718 break;
3719 }
3720 }
3721
Thierry Strudel54dc9782017-02-15 12:12:10 -08003722 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003723
3724 bool *enableZsl = nullptr;
3725 if (gExposeEnableZslKey) {
3726 enableZsl = &pendingRequest.enableZsl;
3727 }
3728
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003729 resultMetadata = translateFromHalMetadata(metadata,
3730 pendingRequest.timestamp, pendingRequest.request_id,
3731 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3732 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003733 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003734 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003736 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003737 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003738 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003739
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003740 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003741
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003742 if (pendingRequest.blob_request) {
3743 //Dump tuning metadata if enabled and available
3744 char prop[PROPERTY_VALUE_MAX];
3745 memset(prop, 0, sizeof(prop));
3746 property_get("persist.camera.dumpmetadata", prop, "0");
3747 int32_t enabled = atoi(prop);
3748 if (enabled && metadata->is_tuning_params_valid) {
3749 dumpMetadataToFile(metadata->tuning_params,
3750 mMetaFrameCount,
3751 enabled,
3752 "Snapshot",
3753 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003754 }
3755 }
3756
3757 if (!internalPproc) {
3758 LOGD("couldn't find need_metadata for this metadata");
3759 // Return metadata buffer
3760 if (free_and_bufdone_meta_buf) {
3761 mMetadataChannel->bufDone(metadata_buf);
3762 free(metadata_buf);
3763 }
3764 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003765
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003767 }
3768 }
3769
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003770 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3771
3772 // Try to send out capture result metadata.
3773 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 return;
3775
Thierry Strudel3d639192016-09-09 11:52:26 -07003776done_metadata:
3777 for (pendingRequestIterator i = mPendingRequestsList.begin();
3778 i != mPendingRequestsList.end() ;i++) {
3779 i->pipeline_depth++;
3780 }
3781 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3782 unblockRequestIfNecessary();
3783}
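// Note (descriptive): the done_metadata path above is taken when the metadata is
// invalid or SOF-only; it only bumps pipeline_depth for the still-pending requests
// and calls unblockRequestIfNecessary() so a throttled process_capture_request can
// proceed.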
3784
3785/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003786 * FUNCTION : handleDepthDataLocked
3787 *
3788 * DESCRIPTION: Handles incoming depth data
3789 *
3790 * PARAMETERS : @depthData : Depth data
3791 * @frameNumber: Frame number of the incoming depth data
3792 *
3793 * RETURN :
3794 *
3795 *==========================================================================*/
3796void QCamera3HardwareInterface::handleDepthDataLocked(
3797 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3798 uint32_t currentFrameNumber;
3799 buffer_handle_t *depthBuffer;
3800
3801 if (nullptr == mDepthChannel) {
3802 LOGE("Depth channel not present!");
3803 return;
3804 }
3805
3806 camera3_stream_buffer_t resultBuffer =
3807 {.acquire_fence = -1,
3808 .release_fence = -1,
3809 .status = CAMERA3_BUFFER_STATUS_OK,
3810 .buffer = nullptr,
3811 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003812 do {
3813 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3814 if (nullptr == depthBuffer) {
3815 break;
3816 }
3817
Emilian Peev7650c122017-01-19 08:24:33 -08003818 resultBuffer.buffer = depthBuffer;
3819 if (currentFrameNumber == frameNumber) {
3820 int32_t rc = mDepthChannel->populateDepthData(depthData,
3821 frameNumber);
3822 if (NO_ERROR != rc) {
3823 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3824 } else {
3825 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3826 }
3827 } else if (currentFrameNumber > frameNumber) {
3828 break;
3829 } else {
3830 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3831 {{currentFrameNumber, mDepthChannel->getStream(),
3832 CAMERA3_MSG_ERROR_BUFFER}}};
3833 orchestrateNotify(&notify_msg);
3834
3835 LOGE("Depth buffer for frame number: %d is missing "
3836 "returning back!", currentFrameNumber);
3837 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3838 }
3839 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003840 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003841 } while (currentFrameNumber < frameNumber);
3842}
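// Note (descriptive): the loop above drains mapped depth buffers in frame order --
// buffers older than the incoming depth data are returned with a
// CAMERA3_MSG_ERROR_BUFFER notification, the matching frame is populated, and any
// newer buffers are left pending.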
3843
3844/*===========================================================================
3845 * FUNCTION : notifyErrorFoPendingDepthData
3846 *
3847 * DESCRIPTION: Returns error for any pending depth buffers
3848 *
3849 * PARAMETERS : depthCh - depth channel that needs to get flushed
3850 *
3851 * RETURN :
3852 *
3853 *==========================================================================*/
3854void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3855 QCamera3DepthChannel *depthCh) {
3856 uint32_t currentFrameNumber;
3857 buffer_handle_t *depthBuffer;
3858
3859 if (nullptr == depthCh) {
3860 return;
3861 }
3862
3863 camera3_notify_msg_t notify_msg =
3864 {.type = CAMERA3_MSG_ERROR,
3865 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3866 camera3_stream_buffer_t resultBuffer =
3867 {.acquire_fence = -1,
3868 .release_fence = -1,
3869 .buffer = nullptr,
3870 .stream = depthCh->getStream(),
3871 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003872
3873 while (nullptr !=
3874 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3875 depthCh->unmapBuffer(currentFrameNumber);
3876
3877 notify_msg.message.error.frame_number = currentFrameNumber;
3878 orchestrateNotify(&notify_msg);
3879
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003880 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003881 };
3882}
3883
3884/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003885 * FUNCTION : hdrPlusPerfLock
3886 *
3887 * DESCRIPTION: perf lock for HDR+ using custom intent
3888 *
3889 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3890 *
3891 * RETURN : None
3892 *
3893 *==========================================================================*/
3894void QCamera3HardwareInterface::hdrPlusPerfLock(
3895 mm_camera_super_buf_t *metadata_buf)
3896{
3897 if (NULL == metadata_buf) {
3898 LOGE("metadata_buf is NULL");
3899 return;
3900 }
3901 metadata_buffer_t *metadata =
3902 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3903 int32_t *p_frame_number_valid =
3904 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3905 uint32_t *p_frame_number =
3906 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3907
3908 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3909 LOGE("%s: Invalid metadata", __func__);
3910 return;
3911 }
3912
3913 //acquire perf lock for HDR_PLUS_PERF_TIME_OUT after the last HDR frame is captured
3914 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3915 if ((p_frame_number != NULL) &&
3916 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003917 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003918 }
3919 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003920}
3921
3922/*===========================================================================
3923 * FUNCTION : handleInputBufferWithLock
3924 *
3925 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3926 *
3927 * PARAMETERS : @frame_number: frame number of the input buffer
3928 *
3929 * RETURN :
3930 *
3931 *==========================================================================*/
3932void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3933{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003934 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003935 pendingRequestIterator i = mPendingRequestsList.begin();
3936 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3937 i++;
3938 }
3939 if (i != mPendingRequestsList.end() && i->input_buffer) {
3940 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003941 CameraMetadata settings;
3942 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3943 if(i->settings) {
3944 settings = i->settings;
3945 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3946 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07003947 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003948 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07003949 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003950 } else {
3951 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07003952 }
3953
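        // Hand the shutter timestamp for this frame to the shutter dispatcher so the
        // shutter notification can be sent to the framework.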
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003954 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3955 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3956 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07003957
3958 camera3_capture_result result;
3959 memset(&result, 0, sizeof(camera3_capture_result));
3960 result.frame_number = frame_number;
3961 result.result = i->settings;
3962 result.input_buffer = i->input_buffer;
3963 result.partial_result = PARTIAL_RESULT_COUNT;
3964
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003965 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003966 LOGD("Input request metadata and input buffer frame_number = %u",
3967 i->frame_number);
3968 i = erasePendingRequest(i);
3969 } else {
3970 LOGE("Could not find input request for frame number %d", frame_number);
3971 }
3972}
3973
3974/*===========================================================================
3975 * FUNCTION : handleBufferWithLock
3976 *
3977 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3978 *
3979 * PARAMETERS : @buffer: image buffer for the callback
3980 * @frame_number: frame number of the image buffer
3981 *
3982 * RETURN :
3983 *
3984 *==========================================================================*/
3985void QCamera3HardwareInterface::handleBufferWithLock(
3986 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3987{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003988 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003989
3990 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3991 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3992 }
3993
Thierry Strudel3d639192016-09-09 11:52:26 -07003994 /* Nothing to be done during error state */
3995 if ((ERROR == mState) || (DEINIT == mState)) {
3996 return;
3997 }
3998 if (mFlushPerf) {
3999 handleBuffersDuringFlushLock(buffer);
4000 return;
4001 }
4002 //not in flush
4003    // Look up the pending request for this frame number. If it is a reprocess
4004    // request (one with an input buffer), try to send out its result metadata now.
4005    // In all cases the buffer is handed to the output buffer dispatcher below.
4006 pendingRequestIterator i = mPendingRequestsList.begin();
4007 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4008 i++;
4009 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004010
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004011 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004012 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004013 // For a reprocessing request, try to send out result metadata.
4014 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004016 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004017
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004018 // Check if this frame was dropped.
4019 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4020 m != mPendingFrameDropList.end(); m++) {
4021 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4022 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4023 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4024 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4025 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4026 frame_number, streamID);
4027 m = mPendingFrameDropList.erase(m);
4028 break;
4029 }
4030 }
4031
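    // Fold in any error status already recorded for this buffer in the pending
    // buffers map before dispatching it.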
4032 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4033 LOGH("result frame_number = %d, buffer = %p",
4034 frame_number, buffer->buffer);
4035
4036 mPendingBuffersMap.removeBuf(buffer->buffer);
4037 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4038
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004039 if (mPreviewStarted == false) {
4040 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4041 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004042 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4043
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004044 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4045 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4046 mPreviewStarted = true;
4047
4048 // Set power hint for preview
4049 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4050 }
4051 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004052}
4053
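/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the result metadata of the pending request matching
 * frameNumber and sends out, in frame number order, all pending
 * result metadata that are ready. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber: frame number of the result metadata
 * @resultMetadata: result metadata ready to be sent, or nullptr for a
 * reprocess request whose result is taken from its settings
 *
 * RETURN :
 *
 *==========================================================================*/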
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004054void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004055 const camera_metadata_t *resultMetadata)
4056{
4057 // Find the pending request for this result metadata.
4058 auto requestIter = mPendingRequestsList.begin();
4059 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4060 requestIter++;
4061 }
4062
4063 if (requestIter == mPendingRequestsList.end()) {
4064 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4065 return;
4066 }
4067
4068 // Update the result metadata
4069 requestIter->resultMetadata = resultMetadata;
4070
4071 // Check what type of request this is.
4072 bool liveRequest = false;
4073 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004074 // HDR+ request doesn't have partial results.
4075 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004076 } else if (requestIter->input_buffer != nullptr) {
4077 // Reprocessing request result is the same as settings.
4078 requestIter->resultMetadata = requestIter->settings;
4079 // Reprocessing request doesn't have partial results.
4080 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4081 } else {
4082 liveRequest = true;
4083 requestIter->partial_result_cnt++;
4084 mPendingLiveRequest--;
4085
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004086 {
4087 Mutex::Autolock l(gHdrPlusClientLock);
4088 // For a live request, send the metadata to HDR+ client.
4089 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4090 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4091 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4092 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004093 }
4094 }
4095
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004096    // The pending requests are ordered by increasing frame number. A request's result
4097    // metadata can be sent only after the metadata of all previous pending requests has been sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004098 bool readyToSend = true;
4099
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004100 // Iterate through the pending requests to send out result metadata that are ready. Also if
4101 // this result metadata belongs to a live request, notify errors for previous live requests
4102 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004103 auto iter = mPendingRequestsList.begin();
4104 while (iter != mPendingRequestsList.end()) {
4105 // Check if current pending request is ready. If it's not ready, the following pending
4106 // requests are also not ready.
4107 if (readyToSend && iter->resultMetadata == nullptr) {
4108 readyToSend = false;
4109 }
4110
4111 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4112
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004113 camera3_capture_result_t result = {};
4114 result.frame_number = iter->frame_number;
4115 result.result = iter->resultMetadata;
4116 result.partial_result = iter->partial_result_cnt;
4117
4118        // If this pending request has result metadata, we may be able to send it out
4119        // now, provided all earlier pending requests have already been sent.
4120 if (iter->resultMetadata != nullptr) {
4121 if (!readyToSend) {
4122                // If any of the previous pending requests is not ready, this pending request is
4123                // also not ready to send, so that result metadata is delivered to the
4124                // framework in frame number order.
4125 iter++;
4126 continue;
4127 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004128 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4129 // If the result metadata belongs to a live request, notify errors for previous pending
4130 // live requests.
4131 mPendingLiveRequest--;
4132
4133 CameraMetadata dummyMetadata;
4134 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4135 result.result = dummyMetadata.release();
4136
4137 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004138
4139            // partial_result should be PARTIAL_RESULT_COUNT in case of
4140            // ERROR_RESULT.
4141 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4142 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004143 } else {
4144 iter++;
4145 continue;
4146 }
4147
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004148 result.output_buffers = nullptr;
4149 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004150 orchestrateResult(&result);
4151
4152 // For reprocessing, result metadata is the same as settings so do not free it here to
4153 // avoid double free.
4154 if (result.result != iter->settings) {
4155 free_camera_metadata((camera_metadata_t *)result.result);
4156 }
4157 iter->resultMetadata = nullptr;
4158 iter = erasePendingRequest(iter);
4159 }
4160
4161 if (liveRequest) {
4162 for (auto &iter : mPendingRequestsList) {
4163 // Increment pipeline depth for the following pending requests.
4164 if (iter.frame_number > frameNumber) {
4165 iter.pipeline_depth++;
4166 }
4167 }
4168 }
4169
4170 unblockRequestIfNecessary();
4171}
4172
Thierry Strudel3d639192016-09-09 11:52:26 -07004173/*===========================================================================
4174 * FUNCTION : unblockRequestIfNecessary
4175 *
4176 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4177 * that mMutex is held when this function is called.
4178 *
4179 * PARAMETERS :
4180 *
4181 * RETURN :
4182 *
4183 *==========================================================================*/
4184void QCamera3HardwareInterface::unblockRequestIfNecessary()
4185{
4186 // Unblock process_capture_request
4187 pthread_cond_signal(&mRequestCond);
4188}
4189
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004190/*===========================================================================
4191 * FUNCTION : isHdrSnapshotRequest
4192 *
4193 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4194 *
4195 * PARAMETERS : @request : camera3 capture request structure
4196 *
4197 * RETURN : true if the request is for an HDR snapshot, false otherwise
4198 *
4199 *==========================================================================*/
4200bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4201{
4202 if (request == NULL) {
4203 LOGE("Invalid request handle");
4204 assert(0);
4205 return false;
4206 }
4207
4208 if (!mForceHdrSnapshot) {
4209 CameraMetadata frame_settings;
4210 frame_settings = request->settings;
4211
4212 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4213 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4214 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4215 return false;
4216 }
4217 } else {
4218 return false;
4219 }
4220
4221 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4222 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4223 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4224 return false;
4225 }
4226 } else {
4227 return false;
4228 }
4229 }
4230
4231 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4232 if (request->output_buffers[i].stream->format
4233 == HAL_PIXEL_FORMAT_BLOB) {
4234 return true;
4235 }
4236 }
4237
4238 return false;
4239}

4240/*===========================================================================
4241 * FUNCTION : orchestrateRequest
4242 *
4243 * DESCRIPTION: Orchestrates a capture request from camera service
4244 *
4245 * PARAMETERS :
4246 * @request : request from framework to process
4247 *
4248 * RETURN : Error status codes
4249 *
4250 *==========================================================================*/
4251int32_t QCamera3HardwareInterface::orchestrateRequest(
4252 camera3_capture_request_t *request)
4253{
4254
4255 uint32_t originalFrameNumber = request->frame_number;
4256 uint32_t originalOutputCount = request->num_output_buffers;
4257 const camera_metadata_t *original_settings = request->settings;
4258 List<InternalRequest> internallyRequestedStreams;
4259 List<InternalRequest> emptyInternalList;
4260
4261 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4262 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
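        /*
         * HDR snapshot orchestration, as implemented below:
         * 1. The BLOB stream is added to an internal request list so that
         *    metering-only "settling" captures can be issued at each exposure
         *    step, with AE locked throughout.
         * 2. Three exposure compensation values are cycled through:
         *    GB_HDR_HALF_STEP_EV, 0 and GB_HDR_2X_STEP_EV.
         * 3. Only the capture mapped to the original framework frame number
         *    produces callbacks to the framework; the internally generated
         *    frame numbers are dropped in orchestrateResult/orchestrateNotify.
         */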
4263 uint32_t internalFrameNumber;
4264 CameraMetadata modified_meta;
4265
4266
4267 /* Add Blob channel to list of internally requested streams */
4268 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4269 if (request->output_buffers[i].stream->format
4270 == HAL_PIXEL_FORMAT_BLOB) {
4271 InternalRequest streamRequested;
4272 streamRequested.meteringOnly = 1;
4273 streamRequested.need_metadata = 0;
4274 streamRequested.stream = request->output_buffers[i].stream;
4275 internallyRequestedStreams.push_back(streamRequested);
4276 }
4277 }
4278 request->num_output_buffers = 0;
4279 auto itr = internallyRequestedStreams.begin();
4280
4281 /* Modify setting to set compensation */
4282 modified_meta = request->settings;
4283 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4284 uint8_t aeLock = 1;
4285 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4286 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4287 camera_metadata_t *modified_settings = modified_meta.release();
4288 request->settings = modified_settings;
4289
4290 /* Capture Settling & -2x frame */
4291 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4292 request->frame_number = internalFrameNumber;
4293 processCaptureRequest(request, internallyRequestedStreams);
4294
4295 request->num_output_buffers = originalOutputCount;
4296 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4297 request->frame_number = internalFrameNumber;
4298 processCaptureRequest(request, emptyInternalList);
4299 request->num_output_buffers = 0;
4300
4301 modified_meta = modified_settings;
4302 expCompensation = 0;
4303 aeLock = 1;
4304 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4305 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4306 modified_settings = modified_meta.release();
4307 request->settings = modified_settings;
4308
4309 /* Capture Settling & 0X frame */
4310
4311 itr = internallyRequestedStreams.begin();
4312 if (itr == internallyRequestedStreams.end()) {
4313 LOGE("Error Internally Requested Stream list is empty");
4314 assert(0);
4315 } else {
4316 itr->need_metadata = 0;
4317 itr->meteringOnly = 1;
4318 }
4319
4320 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4321 request->frame_number = internalFrameNumber;
4322 processCaptureRequest(request, internallyRequestedStreams);
4323
4324 itr = internallyRequestedStreams.begin();
4325 if (itr == internallyRequestedStreams.end()) {
4326 ALOGE("Error Internally Requested Stream list is empty");
4327 assert(0);
4328 } else {
4329 itr->need_metadata = 1;
4330 itr->meteringOnly = 0;
4331 }
4332
4333 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4334 request->frame_number = internalFrameNumber;
4335 processCaptureRequest(request, internallyRequestedStreams);
4336
4337 /* Capture 2X frame*/
4338 modified_meta = modified_settings;
4339 expCompensation = GB_HDR_2X_STEP_EV;
4340 aeLock = 1;
4341 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4342 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4343 modified_settings = modified_meta.release();
4344 request->settings = modified_settings;
4345
4346 itr = internallyRequestedStreams.begin();
4347 if (itr == internallyRequestedStreams.end()) {
4348 ALOGE("Error Internally Requested Stream list is empty");
4349 assert(0);
4350 } else {
4351 itr->need_metadata = 0;
4352 itr->meteringOnly = 1;
4353 }
4354 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4355 request->frame_number = internalFrameNumber;
4356 processCaptureRequest(request, internallyRequestedStreams);
4357
4358 itr = internallyRequestedStreams.begin();
4359 if (itr == internallyRequestedStreams.end()) {
4360 ALOGE("Error Internally Requested Stream list is empty");
4361 assert(0);
4362 } else {
4363 itr->need_metadata = 1;
4364 itr->meteringOnly = 0;
4365 }
4366
4367 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4368 request->frame_number = internalFrameNumber;
4369 processCaptureRequest(request, internallyRequestedStreams);
4370
4371
4372 /* Capture 2X on original streaming config*/
4373 internallyRequestedStreams.clear();
4374
4375 /* Restore original settings pointer */
4376 request->settings = original_settings;
4377 } else {
4378 uint32_t internalFrameNumber;
4379 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4380 request->frame_number = internalFrameNumber;
4381 return processCaptureRequest(request, internallyRequestedStreams);
4382 }
4383
4384 return NO_ERROR;
4385}
4386
4387/*===========================================================================
4388 * FUNCTION : orchestrateResult
4389 *
4390 * DESCRIPTION: Orchestrates a capture result to camera service
4391 *
4392 * PARAMETERS :
4393 * @result : capture result to be sent to the framework
4394 *
4395 * RETURN :
4396 *
4397 *==========================================================================*/
4398void QCamera3HardwareInterface::orchestrateResult(
4399 camera3_capture_result_t *result)
4400{
4401 uint32_t frameworkFrameNumber;
4402 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4403 frameworkFrameNumber);
4404 if (rc != NO_ERROR) {
4405 LOGE("Cannot find translated frameworkFrameNumber");
4406 assert(0);
4407 } else {
4408 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004409 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004410 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004411 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004412 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4413 camera_metadata_entry_t entry;
4414 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4415 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004416 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004417 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4418 if (ret != OK)
4419 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004420 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004421 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004422 result->frame_number = frameworkFrameNumber;
4423 mCallbackOps->process_capture_result(mCallbackOps, result);
4424 }
4425 }
4426}
4427
4428/*===========================================================================
4429 * FUNCTION : orchestrateNotify
4430 *
4431 * DESCRIPTION: Orchestrates a notify to camera service
4432 *
4433 * PARAMETERS :
4434 * @notify_msg : notify message to be sent to the framework
4435 *
4436 * RETURN :
4437 *
4438 *==========================================================================*/
4439void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4440{
4441 uint32_t frameworkFrameNumber;
4442 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004443 int32_t rc = NO_ERROR;
4444
4445 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004446 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004447
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004448 if (rc != NO_ERROR) {
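        // A device-wide error is not tied to a specific framework request, so
        // it is forwarded with frame number 0 even when no translation exists.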
Thierry Strudel2896d122017-02-23 19:18:03 -08004449 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4450 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4451 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004452 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004453 LOGE("Cannot find translated frameworkFrameNumber");
4454 assert(0);
4455 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004456 }
4457 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004458
4459 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4460 LOGD("Internal Request drop the notifyCb");
4461 } else {
4462 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4463 mCallbackOps->notify(mCallbackOps, notify_msg);
4464 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004465}
4466
4467/*===========================================================================
4468 * FUNCTION : FrameNumberRegistry
4469 *
4470 * DESCRIPTION: Constructor
4471 *
4472 * PARAMETERS :
4473 *
4474 * RETURN :
4475 *
4476 *==========================================================================*/
4477FrameNumberRegistry::FrameNumberRegistry()
4478{
4479 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4480}
4481
4482/*===========================================================================
4483 * FUNCTION : ~FrameNumberRegistry
4484 *
4485 * DESCRIPTION: Destructor
4486 *
4487 * PARAMETERS :
4488 *
4489 * RETURN :
4490 *
4491 *==========================================================================*/
4492FrameNumberRegistry::~FrameNumberRegistry()
4493{
4494}
4495
4496/*===========================================================================
4497 * FUNCTION : purgeOldEntriesLocked
4498 *
4499 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4500 *
4501 * PARAMETERS :
4502 *
4503 * RETURN : NONE
4504 *
4505 *==========================================================================*/
4506void FrameNumberRegistry::purgeOldEntriesLocked()
4507{
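    // Evict entries that fall more than FRAME_REGISTER_LRU_SIZE frame numbers
    // behind the next internal number to be handed out, starting from the
    // oldest entry and stopping at the first one that is still recent enough.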
4508 while (_register.begin() != _register.end()) {
4509 auto itr = _register.begin();
4510 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4511 _register.erase(itr);
4512 } else {
4513 return;
4514 }
4515 }
4516}
4517
4518/*===========================================================================
4519 * FUNCTION : allocStoreInternalFrameNumber
4520 *
4521 * DESCRIPTION: Method to record a framework frame number and associate a newly
4522 * generated internal frame number with it
4523 *
4524 * PARAMETERS :
4525 * @frameworkFrameNumber: Identifier given by the framework
4526 * @internalFrameNumber : Output parameter which will hold the newly generated
4527 * internal frame number
4528 *
4529 * RETURN : Error code
4530 *
4531 *==========================================================================*/
4532int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4533 uint32_t &internalFrameNumber)
4534{
4535 Mutex::Autolock lock(mRegistryLock);
4536 internalFrameNumber = _nextFreeInternalNumber++;
4537 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4538 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4539 purgeOldEntriesLocked();
4540 return NO_ERROR;
4541}
4542
4543/*===========================================================================
4544 * FUNCTION : generateStoreInternalFrameNumber
4545 *
4546 * DESCRIPTION: Method to generate and store a new internal frame number that is
4547 * not associated with any framework request
4548 *
4549 * PARAMETERS :
4550 * @internalFrameNumber: Output parameter which will hold the newly generated
4551 * internal frame number
4552 *
4553 * RETURN : Error code
4554 *
4555 *==========================================================================*/
4556int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4557{
4558 Mutex::Autolock lock(mRegistryLock);
4559 internalFrameNumber = _nextFreeInternalNumber++;
4560 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4561 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4562 purgeOldEntriesLocked();
4563 return NO_ERROR;
4564}
4565
4566/*===========================================================================
4567 * FUNCTION : getFrameworkFrameNumber
4568 *
4569 * DESCRIPTION: Method to query the framework frame number given an internal one
4570 *
4571 * PARAMETERS :
4572 * @internalFrameNumber : Internal frame number to look up
4573 * @frameworkFrameNumber: Output parameter holding the framework frame number
4574 *
4575 * RETURN : Error code
4576 *
4577 *==========================================================================*/
4578int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4579 uint32_t &frameworkFrameNumber)
4580{
4581 Mutex::Autolock lock(mRegistryLock);
4582 auto itr = _register.find(internalFrameNumber);
4583 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004584 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004585 return -ENOENT;
4586 }
4587
4588 frameworkFrameNumber = itr->second;
4589 purgeOldEntriesLocked();
4590 return NO_ERROR;
4591}
Thierry Strudel3d639192016-09-09 11:52:26 -07004592
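/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 * info of a stream in the given QCamera3Channel.
 *
 * PARAMETERS : @config: output pbcamera stream configuration
 * @pbStreamId: pbcamera stream id to assign
 * @pbStreamFormat: pbcamera image format to assign
 * @channel: channel that owns the stream
 * @streamIndex: index of the stream within the channel
 *
 * RETURN : OK on success, error code otherwise
 *
 *==========================================================================*/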
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004593status_t QCamera3HardwareInterface::fillPbStreamConfig(
4594 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4595 QCamera3Channel *channel, uint32_t streamIndex) {
4596 if (config == nullptr) {
4597 LOGE("%s: config is null", __FUNCTION__);
4598 return BAD_VALUE;
4599 }
4600
4601 if (channel == nullptr) {
4602 LOGE("%s: channel is null", __FUNCTION__);
4603 return BAD_VALUE;
4604 }
4605
4606 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4607 if (stream == nullptr) {
4608 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4609 return NAME_NOT_FOUND;
4610 }
4611
4612 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4613 if (streamInfo == nullptr) {
4614 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4615 return NAME_NOT_FOUND;
4616 }
4617
4618 config->id = pbStreamId;
4619 config->image.width = streamInfo->dim.width;
4620 config->image.height = streamInfo->dim.height;
4621 config->image.padding = 0;
4622 config->image.format = pbStreamFormat;
4623
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004624 uint32_t totalPlaneSize = 0;
4625
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004626 // Fill plane information.
4627 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4628 pbcamera::PlaneConfiguration plane;
4629 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4630 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4631 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004632
4633 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004634 }
4635
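    // Whatever remains of the frame length beyond the sum of the plane sizes
    // is reported as padding.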
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004636 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004637 return OK;
4638}
4639
Thierry Strudel3d639192016-09-09 11:52:26 -07004640/*===========================================================================
4641 * FUNCTION : processCaptureRequest
4642 *
4643 * DESCRIPTION: process a capture request from camera service
4644 *
4645 * PARAMETERS :
4646 * @request : request from framework to process
4647 *
4648 * RETURN :
4649 *
4650 *==========================================================================*/
4651int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004652 camera3_capture_request_t *request,
4653 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004654{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004655 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004656 int rc = NO_ERROR;
4657 int32_t request_id;
4658 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004659 bool isVidBufRequested = false;
4660 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004661 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004662
4663 pthread_mutex_lock(&mMutex);
4664
4665 // Validate current state
4666 switch (mState) {
4667 case CONFIGURED:
4668 case STARTED:
4669 /* valid state */
4670 break;
4671
4672 case ERROR:
4673 pthread_mutex_unlock(&mMutex);
4674 handleCameraDeviceError();
4675 return -ENODEV;
4676
4677 default:
4678 LOGE("Invalid state %d", mState);
4679 pthread_mutex_unlock(&mMutex);
4680 return -ENODEV;
4681 }
4682
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004683 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004684 if (rc != NO_ERROR) {
4685 LOGE("incoming request is not valid");
4686 pthread_mutex_unlock(&mMutex);
4687 return rc;
4688 }
4689
4690 meta = request->settings;
4691
4692 // For first capture request, send capture intent, and
4693 // stream on all streams
4694 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004695 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004696 // send an unconfigure to the backend so that the isp
4697 // resources are deallocated
4698 if (!mFirstConfiguration) {
4699 cam_stream_size_info_t stream_config_info;
4700 int32_t hal_version = CAM_HAL_V3;
4701 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4702 stream_config_info.buffer_info.min_buffers =
4703 MIN_INFLIGHT_REQUESTS;
4704 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004705 m_bIs4KVideo ? 0 :
4706 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004707 clear_metadata_buffer(mParameters);
4708 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4709 CAM_INTF_PARM_HAL_VERSION, hal_version);
4710 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4711 CAM_INTF_META_STREAM_INFO, stream_config_info);
4712 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4713 mParameters);
4714 if (rc < 0) {
4715 LOGE("set_parms for unconfigure failed");
4716 pthread_mutex_unlock(&mMutex);
4717 return rc;
4718 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004719
Thierry Strudel3d639192016-09-09 11:52:26 -07004720 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004721 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004722 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004723 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004724 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004725 property_get("persist.camera.is_type", is_type_value, "4");
4726 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4727 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4728 property_get("persist.camera.is_type_preview", is_type_value, "4");
4729 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4730 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004731
4732 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4733 int32_t hal_version = CAM_HAL_V3;
4734 uint8_t captureIntent =
4735 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4736 mCaptureIntent = captureIntent;
4737 clear_metadata_buffer(mParameters);
4738 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4739 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4740 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004741 if (mFirstConfiguration) {
4742 // configure instant AEC
4743 // Instant AEC is a session based parameter and it is needed only
4744 // once per complete session after open camera.
4745 // i.e. This is set only once for the first capture request, after open camera.
4746 setInstantAEC(meta);
4747 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004748 uint8_t fwkVideoStabMode=0;
4749 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4750 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4751 }
4752
Xue Tuecac74e2017-04-17 13:58:15 -07004753 // If EIS setprop is enabled then only turn it on for video/preview
4754 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004755 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004756 int32_t vsMode;
4757 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4759 rc = BAD_VALUE;
4760 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004761 LOGD("setEis %d", setEis);
4762 bool eis3Supported = false;
4763 size_t count = IS_TYPE_MAX;
4764 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4765 for (size_t i = 0; i < count; i++) {
4766 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4767 eis3Supported = true;
4768 break;
4769 }
4770 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004771
4772 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004773 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4775 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004776 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4777 is_type = isTypePreview;
4778 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4779 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4780 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004782 } else {
4783 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004785 } else {
4786 is_type = IS_TYPE_NONE;
4787 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004788 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004789 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004790 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4791 }
4792 }
4793
4794 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4795 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4796
Thierry Strudel54dc9782017-02-15 12:12:10 -08004797 //Disable tintless only if the property is set to 0
4798 memset(prop, 0, sizeof(prop));
4799 property_get("persist.camera.tintless.enable", prop, "1");
4800 int32_t tintless_value = atoi(prop);
4801
Thierry Strudel3d639192016-09-09 11:52:26 -07004802 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4803 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004804
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 //Disable CDS for HFR mode or if DIS/EIS is on.
4806 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4807 //after every configure_stream
4808 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4809 (m_bIsVideo)) {
4810 int32_t cds = CAM_CDS_MODE_OFF;
4811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4812 CAM_INTF_PARM_CDS_MODE, cds))
4813 LOGE("Failed to disable CDS for HFR mode");
4814
4815 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004816
4817 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4818 uint8_t* use_av_timer = NULL;
4819
4820 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004821 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004822 use_av_timer = &m_debug_avtimer;
4823 }
4824 else{
4825 use_av_timer =
4826 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004827 if (use_av_timer) {
4828 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4829 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004830 }
4831
4832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4833 rc = BAD_VALUE;
4834 }
4835 }
4836
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 setMobicat();
4838
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004839 uint8_t nrMode = 0;
4840 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4841 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4842 }
4843
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 /* Set fps and hfr mode while sending meta stream info so that sensor
4845 * can configure appropriate streaming mode */
4846 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004847 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4848 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4850 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 if (rc == NO_ERROR) {
4852 int32_t max_fps =
4853 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004854 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004855 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4856 }
4857 /* For HFR, more buffers are dequeued upfront to improve the performance */
4858 if (mBatchSize) {
4859 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4860 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4861 }
4862 }
4863 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004864 LOGE("setHalFpsRange failed");
4865 }
4866 }
4867 if (meta.exists(ANDROID_CONTROL_MODE)) {
4868 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4869 rc = extractSceneMode(meta, metaMode, mParameters);
4870 if (rc != NO_ERROR) {
4871 LOGE("extractSceneMode failed");
4872 }
4873 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004874 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004875
Thierry Strudel04e026f2016-10-10 11:27:36 -07004876 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4877 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4878 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4879 rc = setVideoHdrMode(mParameters, vhdr);
4880 if (rc != NO_ERROR) {
4881 LOGE("setVideoHDR is failed");
4882 }
4883 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004884
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004885 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004886 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004887 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004888 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4889 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4890 sensorModeFullFov)) {
4891 rc = BAD_VALUE;
4892 }
4893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 //TODO: validate the arguments, HSV scenemode should have only the
4895 //advertised fps ranges
4896
4897 /*set the capture intent, hal version, tintless, stream info,
4898     *and DIS enable parameters to the backend*/
4899 LOGD("set_parms META_STREAM_INFO " );
4900 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004901 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4902 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 mStreamConfigInfo.type[i],
4904 mStreamConfigInfo.stream_sizes[i].width,
4905 mStreamConfigInfo.stream_sizes[i].height,
4906 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 mStreamConfigInfo.format[i],
4908 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004910
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4912 mParameters);
4913 if (rc < 0) {
4914 LOGE("set_parms failed for hal version, stream info");
4915 }
4916
Chien-Yu Chenee335912017-02-09 17:53:20 -08004917 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4918 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004919 if (rc != NO_ERROR) {
4920 LOGE("Failed to get sensor output size");
4921 pthread_mutex_unlock(&mMutex);
4922 goto error_exit;
4923 }
4924
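        // Configure the crop region mapper to translate between the full active
        // array and the active array of the sensor mode selected by the backend.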
4925 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4926 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004927 mSensorModeInfo.active_array_size.width,
4928 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004929
4930 /* Set batchmode before initializing channel. Since registerBuffer
4931 * internally initializes some of the channels, better set batchmode
4932 * even before first register buffer */
4933 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4934 it != mStreamInfo.end(); it++) {
4935 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4936 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4937 && mBatchSize) {
4938 rc = channel->setBatchSize(mBatchSize);
4939 //Disable per frame map unmap for HFR/batchmode case
4940 rc |= channel->setPerFrameMapUnmap(false);
4941 if (NO_ERROR != rc) {
4942 LOGE("Channel init failed %d", rc);
4943 pthread_mutex_unlock(&mMutex);
4944 goto error_exit;
4945 }
4946 }
4947 }
4948
4949 //First initialize all streams
4950 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4951 it != mStreamInfo.end(); it++) {
4952 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004953
4954 /* Initial value of NR mode is needed before stream on */
4955 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07004956 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4957 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 setEis) {
4959 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4960 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4961 is_type = mStreamConfigInfo.is_type[i];
4962 break;
4963 }
4964 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004965 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 rc = channel->initialize(IS_TYPE_NONE);
4968 }
4969 if (NO_ERROR != rc) {
4970 LOGE("Channel initialization failed %d", rc);
4971 pthread_mutex_unlock(&mMutex);
4972 goto error_exit;
4973 }
4974 }
4975
4976 if (mRawDumpChannel) {
4977 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4978 if (rc != NO_ERROR) {
4979 LOGE("Error: Raw Dump Channel init failed");
4980 pthread_mutex_unlock(&mMutex);
4981 goto error_exit;
4982 }
4983 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004984 if (mHdrPlusRawSrcChannel) {
4985 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4986 if (rc != NO_ERROR) {
4987 LOGE("Error: HDR+ RAW Source Channel init failed");
4988 pthread_mutex_unlock(&mMutex);
4989 goto error_exit;
4990 }
4991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 if (mSupportChannel) {
4993 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4994 if (rc < 0) {
4995 LOGE("Support channel initialization failed");
4996 pthread_mutex_unlock(&mMutex);
4997 goto error_exit;
4998 }
4999 }
5000 if (mAnalysisChannel) {
5001 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5002 if (rc < 0) {
5003 LOGE("Analysis channel initialization failed");
5004 pthread_mutex_unlock(&mMutex);
5005 goto error_exit;
5006 }
5007 }
5008 if (mDummyBatchChannel) {
5009 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5010 if (rc < 0) {
5011 LOGE("mDummyBatchChannel setBatchSize failed");
5012 pthread_mutex_unlock(&mMutex);
5013 goto error_exit;
5014 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005015 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 if (rc < 0) {
5017 LOGE("mDummyBatchChannel initialization failed");
5018 pthread_mutex_unlock(&mMutex);
5019 goto error_exit;
5020 }
5021 }
5022
5023 // Set bundle info
5024 rc = setBundleInfo();
5025 if (rc < 0) {
5026 LOGE("setBundleInfo failed %d", rc);
5027 pthread_mutex_unlock(&mMutex);
5028 goto error_exit;
5029 }
5030
5031 //update settings from app here
5032 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5033 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5034 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5035 }
5036 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5037 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5038 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5039 }
5040 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5041 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5042 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5043
5044 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5045 (mLinkedCameraId != mCameraId) ) {
5046 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5047 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005048 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 goto error_exit;
5050 }
5051 }
5052
5053 // add bundle related cameras
5054 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5055 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005056 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5057 &m_pDualCamCmdPtr->bundle_info;
5058 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005059 if (mIsDeviceLinked)
5060 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5061 else
5062 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5063
5064 pthread_mutex_lock(&gCamLock);
5065
5066 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5067 LOGE("Dualcam: Invalid Session Id ");
5068 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005069 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005070 goto error_exit;
5071 }
5072
5073 if (mIsMainCamera == 1) {
5074 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5075 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005076 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005077 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005078 // related session id should be session id of linked session
5079 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5080 } else {
5081 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5082 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005083 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005084 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005085 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5086 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005087 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005088 pthread_mutex_unlock(&gCamLock);
5089
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005090 rc = mCameraHandle->ops->set_dual_cam_cmd(
5091 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005092 if (rc < 0) {
5093 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005094 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 goto error_exit;
5096 }
5097 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005098 goto no_error;
5099error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005100 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005101 return rc;
5102no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 mWokenUpByDaemon = false;
5104 mPendingLiveRequest = 0;
5105 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 }
5107
5108 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005109 cam_stream_ID_t streamsArray;
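    // IDs of the streams that need a buffer for this request; filled below and
    // later handed to setFrameParameters().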
Thierry Strudel3d639192016-09-09 11:52:26 -07005110
5111 if (mFlushPerf) {
5112 //we cannot accept any requests during flush
5113 LOGE("process_capture_request cannot proceed during flush");
5114 pthread_mutex_unlock(&mMutex);
5115 return NO_ERROR; //should return an error
5116 }
5117
5118 if (meta.exists(ANDROID_REQUEST_ID)) {
5119 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5120 mCurrentRequestId = request_id;
5121 LOGD("Received request with id: %d", request_id);
5122 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5123        LOGE("Unable to find request id field, "
5124                "& no previous id available");
5125 pthread_mutex_unlock(&mMutex);
5126 return NAME_NOT_FOUND;
5127 } else {
5128 LOGD("Re-using old request id");
5129 request_id = mCurrentRequestId;
5130 }
5131
5132 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5133 request->num_output_buffers,
5134 request->input_buffer,
5135 frameNumber);
5136 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005137 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005139 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005140 uint32_t snapshotStreamId = 0;
5141 for (size_t i = 0; i < request->num_output_buffers; i++) {
5142 const camera3_stream_buffer_t& output = request->output_buffers[i];
5143 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5144
Emilian Peev7650c122017-01-19 08:24:33 -08005145 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5146 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005147 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005148 blob_request = 1;
5149 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5150 }
5151
5152 if (output.acquire_fence != -1) {
5153 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5154 close(output.acquire_fence);
5155 if (rc != OK) {
5156 LOGE("sync wait failed %d", rc);
5157 pthread_mutex_unlock(&mMutex);
5158 return rc;
5159 }
5160 }
5161
Emilian Peev0f3c3162017-03-15 12:57:46 +00005162 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5163 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005164 depthRequestPresent = true;
5165 continue;
5166 }
5167
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005168 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005170
5171 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5172 isVidBufRequested = true;
5173 }
5174 }
5175
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005176    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5177 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5178 itr++) {
5179 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5180 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5181 channel->getStreamID(channel->getStreamTypeMask());
5182
5183 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5184 isVidBufRequested = true;
5185 }
5186 }
5187
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005189 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005190 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 }
5192 if (blob_request && mRawDumpChannel) {
5193 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005194 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005195 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005196 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005197 }
5198
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005199 {
5200 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5201 // Request a RAW buffer if
5202 // 1. mHdrPlusRawSrcChannel is valid.
5203 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5204 // 3. There is no pending HDR+ request.
5205 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5206 mHdrPlusPendingRequests.size() == 0) {
5207 streamsArray.stream_request[streamsArray.num_streams].streamID =
5208 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5209 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5210 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005211 }
5212
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005213 //extract capture intent
5214 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5215 mCaptureIntent =
5216 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5217 }
5218
5219 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5220 mCacMode =
5221 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5222 }
5223
5224 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005225 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005226
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005227 {
5228 Mutex::Autolock l(gHdrPlusClientLock);
5229 // If this request has a still capture intent, try to submit an HDR+ request.
5230 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5231 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5232 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5233 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005234 }
5235
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005236 if (hdrPlusRequest) {
5237 // For a HDR+ request, just set the frame parameters.
5238 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5239 if (rc < 0) {
5240 LOGE("fail to set frame parameters");
5241 pthread_mutex_unlock(&mMutex);
5242 return rc;
5243 }
5244 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 /* Parse the settings:
5246 * - For every request in NORMAL MODE
5247 * - For every request in HFR mode during preview only case
5248 * - For first request of every batch in HFR mode during video
5249 * recording. In batchmode the same settings except frame number is
5250 * repeated in each request of the batch.
5251 */
5252 if (!mBatchSize ||
5253 (mBatchSize && !isVidBufRequested) ||
5254 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005255 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 if (rc < 0) {
5257 LOGE("fail to set frame parameters");
5258 pthread_mutex_unlock(&mMutex);
5259 return rc;
5260 }
5261 }
 5262        /* For batchMode HFR, setFrameParameters is not called for every
 5263         * request; only the frame number of the latest request is parsed.
 5264         * Keep track of the first and last frame numbers in a batch so that
 5265         * metadata for all frame numbers of the batch can be duplicated in
 5266         * handleBatchMetadata */
5267 if (mBatchSize) {
5268 if (!mToBeQueuedVidBufs) {
5269 //start of the batch
5270 mFirstFrameNumberInBatch = request->frame_number;
5271 }
5272 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5273 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5274 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005275 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005276 return BAD_VALUE;
5277 }
5278 }
5279 if (mNeedSensorRestart) {
5280 /* Unlock the mutex as restartSensor waits on the channels to be
5281 * stopped, which in turn calls stream callback functions -
5282 * handleBufferWithLock and handleMetadataWithLock */
5283 pthread_mutex_unlock(&mMutex);
5284 rc = dynamicUpdateMetaStreamInfo();
5285 if (rc != NO_ERROR) {
5286 LOGE("Restarting the sensor failed");
5287 return BAD_VALUE;
5288 }
5289 mNeedSensorRestart = false;
5290 pthread_mutex_lock(&mMutex);
5291 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005292 if(mResetInstantAEC) {
5293 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5294 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5295 mResetInstantAEC = false;
5296 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005297 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 if (request->input_buffer->acquire_fence != -1) {
5299 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5300 close(request->input_buffer->acquire_fence);
5301 if (rc != OK) {
5302 LOGE("input buffer sync wait failed %d", rc);
5303 pthread_mutex_unlock(&mMutex);
5304 return rc;
5305 }
5306 }
5307 }
5308
5309 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5310 mLastCustIntentFrmNum = frameNumber;
5311 }
5312 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005313 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 pendingRequestIterator latestRequest;
5315 pendingRequest.frame_number = frameNumber;
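    // A depth BLOB buffer (if present) is tracked separately, so it is excluded
    // from this request's pending buffer count.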
Emilian Peev7650c122017-01-19 08:24:33 -08005316 pendingRequest.num_buffers = depthRequestPresent ?
5317 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005318 pendingRequest.request_id = request_id;
5319 pendingRequest.blob_request = blob_request;
5320 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005321 if (request->input_buffer) {
5322 pendingRequest.input_buffer =
5323 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5324 *(pendingRequest.input_buffer) = *(request->input_buffer);
5325 pInputBuffer = pendingRequest.input_buffer;
5326 } else {
5327 pendingRequest.input_buffer = NULL;
5328 pInputBuffer = NULL;
5329 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005330 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005331
5332 pendingRequest.pipeline_depth = 0;
5333 pendingRequest.partial_result_cnt = 0;
5334 extractJpegMetadata(mCurJpegMeta, request);
5335 pendingRequest.jpegMetadata = mCurJpegMeta;
5336 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005337 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005338 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5339 mHybridAeEnable =
5340 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5341 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005342
5343 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5344 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005345 /* DevCamDebug metadata processCaptureRequest */
5346 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5347 mDevCamDebugMetaEnable =
5348 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5349 }
5350 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5351 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005352
5353 //extract CAC info
5354 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5355 mCacMode =
5356 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5357 }
5358 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005359 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005360
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005361 // extract enableZsl info
5362 if (gExposeEnableZslKey) {
5363 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5364 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5365 mZslEnabled = pendingRequest.enableZsl;
5366 } else {
5367 pendingRequest.enableZsl = mZslEnabled;
5368 }
5369 }
5370
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 PendingBuffersInRequest bufsForCurRequest;
5372 bufsForCurRequest.frame_number = frameNumber;
5373 // Mark current timestamp for the new request
5374 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005375 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005376
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005377 if (hdrPlusRequest) {
5378 // Save settings for this request.
5379 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5380 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5381
5382 // Add to pending HDR+ request queue.
5383 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5384 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5385
5386 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5387 }
5388
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005390 if ((request->output_buffers[i].stream->data_space ==
5391 HAL_DATASPACE_DEPTH) &&
5392 (HAL_PIXEL_FORMAT_BLOB ==
5393 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005394 continue;
5395 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005396 RequestedBufferInfo requestedBuf;
5397 memset(&requestedBuf, 0, sizeof(requestedBuf));
5398 requestedBuf.stream = request->output_buffers[i].stream;
5399 requestedBuf.buffer = NULL;
5400 pendingRequest.buffers.push_back(requestedBuf);
5401
5402 // Add to buffer handle the pending buffers list
5403 PendingBufferInfo bufferInfo;
5404 bufferInfo.buffer = request->output_buffers[i].buffer;
5405 bufferInfo.stream = request->output_buffers[i].stream;
5406 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5407 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5408 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5409 frameNumber, bufferInfo.buffer,
5410 channel->getStreamTypeMask(), bufferInfo.stream->format);
5411 }
5412 // Add this request packet into mPendingBuffersMap
5413 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5414 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5415 mPendingBuffersMap.get_num_overall_buffers());
5416
5417 latestRequest = mPendingRequestsList.insert(
5418 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005419
5420 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5421 // for the frame number.
5422 mShutterDispatcher.expectShutter(frameNumber);
5423 for (size_t i = 0; i < request->num_output_buffers; i++) {
5424 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5425 }
5426
Thierry Strudel3d639192016-09-09 11:52:26 -07005427 if(mFlush) {
5428 LOGI("mFlush is true");
5429 pthread_mutex_unlock(&mMutex);
5430 return NO_ERROR;
5431 }
5432
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005433 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5434 // channel.
5435 if (!hdrPlusRequest) {
5436 int indexUsed;
5437 // Notify metadata channel we receive a request
5438 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005439
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005440 if(request->input_buffer != NULL){
5441 LOGD("Input request, frame_number %d", frameNumber);
5442 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5443 if (NO_ERROR != rc) {
5444 LOGE("fail to set reproc parameters");
5445 pthread_mutex_unlock(&mMutex);
5446 return rc;
5447 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005448 }
5449
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005450 // Call request on other streams
5451 uint32_t streams_need_metadata = 0;
5452 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5453 for (size_t i = 0; i < request->num_output_buffers; i++) {
5454 const camera3_stream_buffer_t& output = request->output_buffers[i];
5455 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5456
5457 if (channel == NULL) {
5458 LOGW("invalid channel pointer for stream");
5459 continue;
5460 }
5461
5462 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5463 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5464 output.buffer, request->input_buffer, frameNumber);
5465 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005466 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005467 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5468 if (rc < 0) {
5469 LOGE("Fail to request on picture channel");
5470 pthread_mutex_unlock(&mMutex);
5471 return rc;
5472 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005473 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005474 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5475 assert(NULL != mDepthChannel);
5476 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005477
Emilian Peev7650c122017-01-19 08:24:33 -08005478 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5479 if (rc < 0) {
5480 LOGE("Fail to map on depth buffer");
5481 pthread_mutex_unlock(&mMutex);
5482 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005483 }
Emilian Peev7650c122017-01-19 08:24:33 -08005484 } else {
5485 LOGD("snapshot request with buffer %p, frame_number %d",
5486 output.buffer, frameNumber);
5487 if (!request->settings) {
5488 rc = channel->request(output.buffer, frameNumber,
5489 NULL, mPrevParameters, indexUsed);
5490 } else {
5491 rc = channel->request(output.buffer, frameNumber,
5492 NULL, mParameters, indexUsed);
5493 }
5494 if (rc < 0) {
5495 LOGE("Fail to request on picture channel");
5496 pthread_mutex_unlock(&mMutex);
5497 return rc;
5498 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005499
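                        // Record the buffer index the backend should use for this stream:
                        // free-running in constrained high-speed mode, otherwise the index
                        // returned by the channel request above.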
Emilian Peev7650c122017-01-19 08:24:33 -08005500 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5501 uint32_t j = 0;
5502 for (j = 0; j < streamsArray.num_streams; j++) {
5503 if (streamsArray.stream_request[j].streamID == streamId) {
5504 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5505 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5506 else
5507 streamsArray.stream_request[j].buf_index = indexUsed;
5508 break;
5509 }
5510 }
5511 if (j == streamsArray.num_streams) {
5512 LOGE("Did not find matching stream to update index");
5513 assert(0);
5514 }
5515
5516 pendingBufferIter->need_metadata = true;
5517 streams_need_metadata++;
5518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005519 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005520 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5521 bool needMetadata = false;
5522 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5523 rc = yuvChannel->request(output.buffer, frameNumber,
5524 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5525 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005527 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005528 pthread_mutex_unlock(&mMutex);
5529 return rc;
5530 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005531
5532 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5533 uint32_t j = 0;
5534 for (j = 0; j < streamsArray.num_streams; j++) {
5535 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005536 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5537 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5538 else
5539 streamsArray.stream_request[j].buf_index = indexUsed;
5540 break;
5541 }
5542 }
5543 if (j == streamsArray.num_streams) {
5544 LOGE("Did not find matching stream to update index");
5545 assert(0);
5546 }
5547
5548 pendingBufferIter->need_metadata = needMetadata;
5549 if (needMetadata)
5550 streams_need_metadata += 1;
5551 LOGD("calling YUV channel request, need_metadata is %d",
5552 needMetadata);
5553 } else {
5554 LOGD("request with buffer %p, frame_number %d",
5555 output.buffer, frameNumber);
5556
5557 rc = channel->request(output.buffer, frameNumber, indexUsed);
5558
5559 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5560 uint32_t j = 0;
5561 for (j = 0; j < streamsArray.num_streams; j++) {
5562 if (streamsArray.stream_request[j].streamID == streamId) {
5563 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5564 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5565 else
5566 streamsArray.stream_request[j].buf_index = indexUsed;
5567 break;
5568 }
5569 }
5570 if (j == streamsArray.num_streams) {
5571 LOGE("Did not find matching stream to update index");
5572 assert(0);
5573 }
5574
5575 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5576 && mBatchSize) {
5577 mToBeQueuedVidBufs++;
5578 if (mToBeQueuedVidBufs == mBatchSize) {
5579 channel->queueBatchBuf();
5580 }
5581 }
5582 if (rc < 0) {
5583 LOGE("request failed");
5584 pthread_mutex_unlock(&mMutex);
5585 return rc;
5586 }
5587 }
5588 pendingBufferIter++;
5589 }
5590
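        // Issue requests for streams the HAL asked for internally (e.g. metering-only
        // captures); these carry no framework output buffer.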
5591 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5592 itr++) {
5593 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5594
5595 if (channel == NULL) {
5596 LOGE("invalid channel pointer for stream");
5597 assert(0);
5598 return BAD_VALUE;
5599 }
5600
5601 InternalRequest requestedStream;
5602 requestedStream = (*itr);
5603
5604
5605 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5606 LOGD("snapshot request internally input buffer %p, frame_number %d",
5607 request->input_buffer, frameNumber);
5608 if(request->input_buffer != NULL){
5609 rc = channel->request(NULL, frameNumber,
5610 pInputBuffer, &mReprocMeta, indexUsed, true,
5611 requestedStream.meteringOnly);
5612 if (rc < 0) {
5613 LOGE("Fail to request on picture channel");
5614 pthread_mutex_unlock(&mMutex);
5615 return rc;
5616 }
5617 } else {
5618 LOGD("snapshot request with frame_number %d", frameNumber);
5619 if (!request->settings) {
5620 rc = channel->request(NULL, frameNumber,
5621 NULL, mPrevParameters, indexUsed, true,
5622 requestedStream.meteringOnly);
5623 } else {
5624 rc = channel->request(NULL, frameNumber,
5625 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5626 }
5627 if (rc < 0) {
5628 LOGE("Fail to request on picture channel");
5629 pthread_mutex_unlock(&mMutex);
5630 return rc;
5631 }
5632
5633 if ((*itr).meteringOnly != 1) {
5634 requestedStream.need_metadata = 1;
5635 streams_need_metadata++;
5636 }
5637 }
5638
5639 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5640 uint32_t j = 0;
5641 for (j = 0; j < streamsArray.num_streams; j++) {
5642 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005643 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5644 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5645 else
5646 streamsArray.stream_request[j].buf_index = indexUsed;
5647 break;
5648 }
5649 }
5650 if (j == streamsArray.num_streams) {
5651 LOGE("Did not find matching stream to update index");
5652 assert(0);
5653 }
5654
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005655 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005656 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005657 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005658 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005659 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005660 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005661 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 //If 2 streams have need_metadata set to true, fail the request, unless
5664 //we copy/reference count the metadata buffer
5665 if (streams_need_metadata > 1) {
5666 LOGE("not supporting request in which two streams requires"
5667 " 2 HAL metadata for reprocessing");
5668 pthread_mutex_unlock(&mMutex);
5669 return -EINVAL;
5670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005671
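        // PDAF data is only needed by the backend when this request contains a
        // depth BLOB output.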
Emilian Peev7650c122017-01-19 08:24:33 -08005672 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5674 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5675 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5676 pthread_mutex_unlock(&mMutex);
5677 return BAD_VALUE;
5678 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005679 if (request->input_buffer == NULL) {
5680 /* Set the parameters to backend:
5681 * - For every request in NORMAL MODE
5682 * - For every request in HFR mode during preview only case
5683 * - Once every batch in HFR mode during video recording
5684 */
5685 if (!mBatchSize ||
5686 (mBatchSize && !isVidBufRequested) ||
5687 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5688 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5689 mBatchSize, isVidBufRequested,
5690 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005691
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005692 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
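                // Merge this request's stream list into the accumulated batch list,
                // skipping stream IDs that are already present.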
5693 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5694 uint32_t m = 0;
5695 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5696 if (streamsArray.stream_request[k].streamID ==
5697 mBatchedStreamsArray.stream_request[m].streamID)
5698 break;
5699 }
5700 if (m == mBatchedStreamsArray.num_streams) {
5701 mBatchedStreamsArray.stream_request\
5702 [mBatchedStreamsArray.num_streams].streamID =
5703 streamsArray.stream_request[k].streamID;
5704 mBatchedStreamsArray.stream_request\
5705 [mBatchedStreamsArray.num_streams].buf_index =
5706 streamsArray.stream_request[k].buf_index;
5707 mBatchedStreamsArray.num_streams =
5708 mBatchedStreamsArray.num_streams + 1;
5709 }
5710 }
5711 streamsArray = mBatchedStreamsArray;
5712 }
5713 /* Update stream id of all the requested buffers */
5714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5715 streamsArray)) {
5716 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005717 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005718 return BAD_VALUE;
5719 }
5720
5721 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5722 mParameters);
5723 if (rc < 0) {
5724 LOGE("set_parms failed");
5725 }
 5726            /* reset to zero because the batch has been queued */
5727 mToBeQueuedVidBufs = 0;
5728 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5729 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5730 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
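            // Batch not yet full: accumulate this request's streams into
            // mBatchedStreamsArray (deduplicated by streamID); parameters are
            // sent to the backend only when the batch completes.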
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005731 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5732 uint32_t m = 0;
5733 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5734 if (streamsArray.stream_request[k].streamID ==
5735 mBatchedStreamsArray.stream_request[m].streamID)
5736 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005737 }
5738 if (m == mBatchedStreamsArray.num_streams) {
5739 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5740 streamID = streamsArray.stream_request[k].streamID;
5741 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5742 buf_index = streamsArray.stream_request[k].buf_index;
5743 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5744 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005745 }
5746 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005747 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005748
5749 // Start all streams after the first setting is sent, so that the
5750 // setting can be applied sooner: (0 + apply_delay)th frame.
5751 if (mState == CONFIGURED && mChannelHandle) {
 5752            // Channels are configured but not started yet; start them now.
5753 LOGH("Start META Channel");
5754 rc = mMetadataChannel->start();
5755 if (rc < 0) {
5756 LOGE("META channel start failed");
5757 pthread_mutex_unlock(&mMutex);
5758 return rc;
5759 }
5760
5761 if (mAnalysisChannel) {
5762 rc = mAnalysisChannel->start();
5763 if (rc < 0) {
5764 LOGE("Analysis channel start failed");
5765 mMetadataChannel->stop();
5766 pthread_mutex_unlock(&mMutex);
5767 return rc;
5768 }
5769 }
5770
5771 if (mSupportChannel) {
5772 rc = mSupportChannel->start();
5773 if (rc < 0) {
5774 LOGE("Support channel start failed");
5775 mMetadataChannel->stop();
 5776                    /* Although support and analysis are mutually exclusive today,
 5777                     * stop the analysis channel in any case for future proofing */
5778 if (mAnalysisChannel) {
5779 mAnalysisChannel->stop();
5780 }
5781 pthread_mutex_unlock(&mMutex);
5782 return rc;
5783 }
5784 }
5785 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5786 it != mStreamInfo.end(); it++) {
5787 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5788 LOGH("Start Processing Channel mask=%d",
5789 channel->getStreamTypeMask());
5790 rc = channel->start();
5791 if (rc < 0) {
5792 LOGE("channel start failed");
5793 pthread_mutex_unlock(&mMutex);
5794 return rc;
5795 }
5796 }
5797
5798 if (mRawDumpChannel) {
5799 LOGD("Starting raw dump stream");
5800 rc = mRawDumpChannel->start();
5801 if (rc != NO_ERROR) {
5802 LOGE("Error Starting Raw Dump Channel");
5803 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5804 it != mStreamInfo.end(); it++) {
5805 QCamera3Channel *channel =
5806 (QCamera3Channel *)(*it)->stream->priv;
5807 LOGH("Stopping Processing Channel mask=%d",
5808 channel->getStreamTypeMask());
5809 channel->stop();
5810 }
5811 if (mSupportChannel)
5812 mSupportChannel->stop();
5813 if (mAnalysisChannel) {
5814 mAnalysisChannel->stop();
5815 }
5816 mMetadataChannel->stop();
5817 pthread_mutex_unlock(&mMutex);
5818 return rc;
5819 }
5820 }
5821
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005822 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005823 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005824 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005825 if (rc != NO_ERROR) {
5826 LOGE("start_channel failed %d", rc);
5827 pthread_mutex_unlock(&mMutex);
5828 return rc;
5829 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005830
5831 {
5832 // Configure Easel for stream on.
5833 Mutex::Autolock l(gHdrPlusClientLock);
5834 if (EaselManagerClientOpened) {
5835 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5836 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5837 if (rc != OK) {
5838 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5839 mCameraId, mSensorModeInfo.op_pixel_clk);
5840 pthread_mutex_unlock(&mMutex);
5841 return rc;
5842 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005843 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005844 }
5845 }
5846
5847 // Start sensor streaming.
5848 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5849 mChannelHandle);
5850 if (rc != NO_ERROR) {
5851 LOGE("start_sensor_stream_on failed %d", rc);
5852 pthread_mutex_unlock(&mMutex);
5853 return rc;
5854 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005855 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005856 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005857 }
5858
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005859 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5860 {
5861 Mutex::Autolock l(gHdrPlusClientLock);
5862 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5863 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5864 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5865 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5866 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5867 rc = enableHdrPlusModeLocked();
5868 if (rc != OK) {
5869 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5870 pthread_mutex_unlock(&mMutex);
5871 return rc;
5872 }
5873
5874 mFirstPreviewIntentSeen = true;
5875 }
5876 }
5877
Thierry Strudel3d639192016-09-09 11:52:26 -07005878 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5879
5880 mState = STARTED;
5881 // Added a timed condition wait
5882 struct timespec ts;
5883 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005884 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005885 if (rc < 0) {
5886 isValidTimeout = 0;
5887 LOGE("Error reading the real time clock!!");
5888 }
5889 else {
 5890        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 int64_t timeout = 5;
5892 {
5893 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5894 // If there is a pending HDR+ request, the following requests may be blocked until the
5895 // HDR+ request is done. So allow a longer timeout.
5896 if (mHdrPlusPendingRequests.size() > 0) {
5897 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5898 }
5899 }
5900 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005901 }
 5902    //Block on the condition variable until there is room for more in-flight requests
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005903 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005904 (mState != ERROR) && (mState != DEINIT)) {
5905 if (!isValidTimeout) {
5906 LOGD("Blocking on conditional wait");
5907 pthread_cond_wait(&mRequestCond, &mMutex);
5908 }
5909 else {
5910 LOGD("Blocking on timed conditional wait");
5911 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5912 if (rc == ETIMEDOUT) {
5913 rc = -ENODEV;
5914 LOGE("Unblocked on timeout!!!!");
5915 break;
5916 }
5917 }
5918 LOGD("Unblocked");
5919 if (mWokenUpByDaemon) {
5920 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005921 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005922 break;
5923 }
5924 }
5925 pthread_mutex_unlock(&mMutex);
5926
5927 return rc;
5928}
5929
5930/*===========================================================================
5931 * FUNCTION : dump
5932 *
 5933 * DESCRIPTION: Dump the current HAL state (pending requests, pending buffers
 5934 *              and the pending frame drop list) to the given file descriptor
 5935 *
 5936 * PARAMETERS :
 5937 *   @fd : file descriptor to write the dump output to
 5938 * RETURN : None
5939 *==========================================================================*/
5940void QCamera3HardwareInterface::dump(int fd)
5941{
5942 pthread_mutex_lock(&mMutex);
5943 dprintf(fd, "\n Camera HAL3 information Begin \n");
5944
5945 dprintf(fd, "\nNumber of pending requests: %zu \n",
5946 mPendingRequestsList.size());
5947 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5948 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5949 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5950 for(pendingRequestIterator i = mPendingRequestsList.begin();
5951 i != mPendingRequestsList.end(); i++) {
5952 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5953 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5954 i->input_buffer);
5955 }
5956 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5957 mPendingBuffersMap.get_num_overall_buffers());
5958 dprintf(fd, "-------+------------------\n");
5959 dprintf(fd, " Frame | Stream type mask \n");
5960 dprintf(fd, "-------+------------------\n");
5961 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5962 for(auto &j : req.mPendingBufferList) {
5963 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5964 dprintf(fd, " %5d | %11d \n",
5965 req.frame_number, channel->getStreamTypeMask());
5966 }
5967 }
5968 dprintf(fd, "-------+------------------\n");
5969
5970 dprintf(fd, "\nPending frame drop list: %zu\n",
5971 mPendingFrameDropList.size());
5972 dprintf(fd, "-------+-----------\n");
5973 dprintf(fd, " Frame | Stream ID \n");
5974 dprintf(fd, "-------+-----------\n");
5975 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5976 i != mPendingFrameDropList.end(); i++) {
5977 dprintf(fd, " %5d | %9d \n",
5978 i->frame_number, i->stream_ID);
5979 }
5980 dprintf(fd, "-------+-----------\n");
5981
5982 dprintf(fd, "\n Camera HAL3 information End \n");
5983
5984 /* use dumpsys media.camera as trigger to send update debug level event */
5985 mUpdateDebugLevel = true;
5986 pthread_mutex_unlock(&mMutex);
5987 return;
5988}
5989
5990/*===========================================================================
5991 * FUNCTION : flush
5992 *
5993 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5994 * conditionally restarts channels
5995 *
5996 * PARAMETERS :
5997 * @ restartChannels: re-start all channels
5998 *
5999 *
6000 * RETURN :
6001 * 0 on success
6002 * Error code on failure
6003 *==========================================================================*/
6004int QCamera3HardwareInterface::flush(bool restartChannels)
6005{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006006 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006007 int32_t rc = NO_ERROR;
6008
6009 LOGD("Unblocking Process Capture Request");
6010 pthread_mutex_lock(&mMutex);
6011 mFlush = true;
6012 pthread_mutex_unlock(&mMutex);
6013
6014 rc = stopAllChannels();
6015 // unlink of dualcam
6016 if (mIsDeviceLinked) {
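        // Tear down the related-sensors (dual camera) bundle so the backend stops
        // treating this session as linked; failure here is logged but flush proceeds.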
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006017 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6018 &m_pDualCamCmdPtr->bundle_info;
6019 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006020 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6021 pthread_mutex_lock(&gCamLock);
6022
6023 if (mIsMainCamera == 1) {
6024 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6025 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006026 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006027 // related session id should be session id of linked session
6028 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6029 } else {
6030 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6031 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006032 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006033 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6034 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006035 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006036 pthread_mutex_unlock(&gCamLock);
6037
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006038 rc = mCameraHandle->ops->set_dual_cam_cmd(
6039 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006040 if (rc < 0) {
6041 LOGE("Dualcam: Unlink failed, but still proceed to close");
6042 }
6043 }
6044
6045 if (rc < 0) {
6046 LOGE("stopAllChannels failed");
6047 return rc;
6048 }
6049 if (mChannelHandle) {
6050 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6051 mChannelHandle);
6052 }
6053
6054 // Reset bundle info
6055 rc = setBundleInfo();
6056 if (rc < 0) {
6057 LOGE("setBundleInfo failed %d", rc);
6058 return rc;
6059 }
6060
6061 // Mutex Lock
6062 pthread_mutex_lock(&mMutex);
6063
6064 // Unblock process_capture_request
6065 mPendingLiveRequest = 0;
6066 pthread_cond_signal(&mRequestCond);
6067
6068 rc = notifyErrorForPendingRequests();
6069 if (rc < 0) {
6070 LOGE("notifyErrorForPendingRequests failed");
6071 pthread_mutex_unlock(&mMutex);
6072 return rc;
6073 }
6074
6075 mFlush = false;
6076
6077 // Start the Streams/Channels
6078 if (restartChannels) {
6079 rc = startAllChannels();
6080 if (rc < 0) {
6081 LOGE("startAllChannels failed");
6082 pthread_mutex_unlock(&mMutex);
6083 return rc;
6084 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006085 if (mChannelHandle) {
6086 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006087 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006088 if (rc < 0) {
6089 LOGE("start_channel failed");
6090 pthread_mutex_unlock(&mMutex);
6091 return rc;
6092 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006093 }
6094 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006095 pthread_mutex_unlock(&mMutex);
6096
6097 return 0;
6098}
6099
6100/*===========================================================================
6101 * FUNCTION : flushPerf
6102 *
6103 * DESCRIPTION: This is the performance optimization version of flush that does
 6104 * DESCRIPTION: This is the performance-optimized version of flush: instead of
 6105 *              streaming off, it flushes the backend and waits for pending buffers
6106 * PARAMETERS :
6107 *
6108 *
6109 * RETURN : 0 : success
6110 * -EINVAL: input is malformed (device is not valid)
6111 * -ENODEV: if the device has encountered a serious error
6112 *==========================================================================*/
6113int QCamera3HardwareInterface::flushPerf()
6114{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006115 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006116 int32_t rc = 0;
6117 struct timespec timeout;
6118 bool timed_wait = false;
6119
6120 pthread_mutex_lock(&mMutex);
6121 mFlushPerf = true;
6122 mPendingBuffersMap.numPendingBufsAtFlush =
6123 mPendingBuffersMap.get_num_overall_buffers();
6124 LOGD("Calling flush. Wait for %d buffers to return",
6125 mPendingBuffersMap.numPendingBufsAtFlush);
6126
6127 /* send the flush event to the backend */
6128 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6129 if (rc < 0) {
6130 LOGE("Error in flush: IOCTL failure");
6131 mFlushPerf = false;
6132 pthread_mutex_unlock(&mMutex);
6133 return -ENODEV;
6134 }
6135
6136 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6137 LOGD("No pending buffers in HAL, return flush");
6138 mFlushPerf = false;
6139 pthread_mutex_unlock(&mMutex);
6140 return rc;
6141 }
6142
6143 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006144 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 if (rc < 0) {
6146 LOGE("Error reading the real time clock, cannot use timed wait");
6147 } else {
6148 timeout.tv_sec += FLUSH_TIMEOUT;
6149 timed_wait = true;
6150 }
6151
 6152    //Block on the condition variable until all pending buffers are returned
6153 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6154 LOGD("Waiting on mBuffersCond");
6155 if (!timed_wait) {
6156 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6157 if (rc != 0) {
6158 LOGE("pthread_cond_wait failed due to rc = %s",
6159 strerror(rc));
6160 break;
6161 }
6162 } else {
6163 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6164 if (rc != 0) {
6165 LOGE("pthread_cond_timedwait failed due to rc = %s",
6166 strerror(rc));
6167 break;
6168 }
6169 }
6170 }
6171 if (rc != 0) {
6172 mFlushPerf = false;
6173 pthread_mutex_unlock(&mMutex);
6174 return -ENODEV;
6175 }
6176
6177 LOGD("Received buffers, now safe to return them");
6178
6179 //make sure the channels handle flush
6180 //currently only required for the picture channel to release snapshot resources
6181 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6182 it != mStreamInfo.end(); it++) {
6183 QCamera3Channel *channel = (*it)->channel;
6184 if (channel) {
6185 rc = channel->flush();
6186 if (rc) {
6187 LOGE("Flushing the channels failed with error %d", rc);
 6188                // Even though the channel flush failed, we need to continue and
 6189                // return the buffers we have to the framework; however, the return
 6190                // value will be an error
6191 rc = -ENODEV;
6192 }
6193 }
6194 }
6195
6196 /* notify the frameworks and send errored results */
6197 rc = notifyErrorForPendingRequests();
6198 if (rc < 0) {
6199 LOGE("notifyErrorForPendingRequests failed");
6200 pthread_mutex_unlock(&mMutex);
6201 return rc;
6202 }
6203
6204 //unblock process_capture_request
6205 mPendingLiveRequest = 0;
6206 unblockRequestIfNecessary();
6207
6208 mFlushPerf = false;
6209 pthread_mutex_unlock(&mMutex);
6210 LOGD ("Flush Operation complete. rc = %d", rc);
6211 return rc;
6212}
6213
6214/*===========================================================================
6215 * FUNCTION : handleCameraDeviceError
6216 *
6217 * DESCRIPTION: This function calls internal flush and notifies the error to
6218 * framework and updates the state variable.
6219 *
6220 * PARAMETERS : None
6221 *
6222 * RETURN : NO_ERROR on Success
6223 * Error code on failure
6224 *==========================================================================*/
6225int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6226{
6227 int32_t rc = NO_ERROR;
6228
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006229 {
6230 Mutex::Autolock lock(mFlushLock);
6231 pthread_mutex_lock(&mMutex);
6232 if (mState != ERROR) {
6233 //if mState != ERROR, nothing to be done
6234 pthread_mutex_unlock(&mMutex);
6235 return NO_ERROR;
6236 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006237 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006238
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006239 rc = flush(false /* restart channels */);
6240 if (NO_ERROR != rc) {
6241 LOGE("internal flush to handle mState = ERROR failed");
6242 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006243
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006244 pthread_mutex_lock(&mMutex);
6245 mState = DEINIT;
6246 pthread_mutex_unlock(&mMutex);
6247 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006248
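    // With the internal flush done and all locks released, notify the framework
    // of the fatal device error.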
6249 camera3_notify_msg_t notify_msg;
6250 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6251 notify_msg.type = CAMERA3_MSG_ERROR;
6252 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6253 notify_msg.message.error.error_stream = NULL;
6254 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006255 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006256
6257 return rc;
6258}
6259
6260/*===========================================================================
6261 * FUNCTION : captureResultCb
6262 *
 6263 * DESCRIPTION: Callback handler for all capture results
 6264 * (streams, as well as metadata)
 6265 *
 6266 * PARAMETERS :
 6267 * @metadata : metadata information
 6268 * @buffer : actual gralloc buffer to be returned to frameworks.
 6269 * NULL if metadata.
 * @frame_number : frame number the result corresponds to
 * @isInputBuffer : true if this callback is for the request's input buffer
6270 *
6271 * RETURN : NONE
6272 *==========================================================================*/
6273void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6274 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6275{
6276 if (metadata_buf) {
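        // Snapshot mBatchSize under mMutex: handleBatchMetadata() runs without the
        // lock held, while handleMetadataWithLock() below is called with it held.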
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006277 pthread_mutex_lock(&mMutex);
6278 uint8_t batchSize = mBatchSize;
6279 pthread_mutex_unlock(&mMutex);
6280 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006281 handleBatchMetadata(metadata_buf,
6282 true /* free_and_bufdone_meta_buf */);
6283 } else { /* mBatchSize = 0 */
6284 hdrPlusPerfLock(metadata_buf);
6285 pthread_mutex_lock(&mMutex);
6286 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006287 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006288 true /* last urgent frame of batch metadata */,
6289 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006290 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006291 pthread_mutex_unlock(&mMutex);
6292 }
6293 } else if (isInputBuffer) {
6294 pthread_mutex_lock(&mMutex);
6295 handleInputBufferWithLock(frame_number);
6296 pthread_mutex_unlock(&mMutex);
6297 } else {
6298 pthread_mutex_lock(&mMutex);
6299 handleBufferWithLock(buffer, frame_number);
6300 pthread_mutex_unlock(&mMutex);
6301 }
6302 return;
6303}
6304
6305/*===========================================================================
6306 * FUNCTION : getReprocessibleOutputStreamId
6307 *
6308 * DESCRIPTION: Get source output stream id for the input reprocess stream
6309 * based on size and format, which would be the largest
6310 * output stream if an input stream exists.
6311 *
6312 * PARAMETERS :
6313 * @id : return the stream id if found
6314 *
6315 * RETURN : int32_t type of status
6316 * NO_ERROR -- success
 6317 * non-zero failure code
6318 *==========================================================================*/
6319int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6320{
 6321    /* Check whether any output or bidirectional stream has the same size and
 6322       format as the input stream, and if so return that stream's server ID */
6323 if ((mInputStreamInfo.dim.width > 0) &&
6324 (mInputStreamInfo.dim.height > 0)) {
6325 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6326 it != mStreamInfo.end(); it++) {
6327
6328 camera3_stream_t *stream = (*it)->stream;
6329 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6330 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6331 (stream->format == mInputStreamInfo.format)) {
6332 // Usage flag for an input stream and the source output stream
6333 // may be different.
6334 LOGD("Found reprocessible output stream! %p", *it);
6335 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6336 stream->usage, mInputStreamInfo.usage);
6337
6338 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6339 if (channel != NULL && channel->mStreams[0]) {
6340 id = channel->mStreams[0]->getMyServerID();
6341 return NO_ERROR;
6342 }
6343 }
6344 }
6345 } else {
6346 LOGD("No input stream, so no reprocessible output stream");
6347 }
6348 return NAME_NOT_FOUND;
6349}
6350
6351/*===========================================================================
6352 * FUNCTION : lookupFwkName
6353 *
 6354 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 6355 * make sure the parameter is correctly propagated
6356 *
6357 * PARAMETERS :
6358 * @arr : map between the two enums
6359 * @len : len of the map
6360 * @hal_name : name of the hal_parm to map
6361 *
6362 * RETURN : int type of status
6363 * fwk_name -- success
 6364 * non-zero failure code
6365 *==========================================================================*/
6366template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6367 size_t len, halType hal_name)
6368{
6369
6370 for (size_t i = 0; i < len; i++) {
6371 if (arr[i].hal_name == hal_name) {
6372 return arr[i].fwk_name;
6373 }
6374 }
6375
 6376    /* Not being able to find a matching framework type is not necessarily
 6377     * an error case. This happens when mm-camera supports more attributes
 6378     * than the frameworks do */
6379 LOGH("Cannot find matching framework type");
6380 return NAME_NOT_FOUND;
6381}
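// Illustrative use (map contents are hypothetical): for an entry whose hal_name is
// CAM_EFFECT_MODE_OFF and fwk_name is ANDROID_CONTROL_EFFECT_MODE_OFF,
// lookupFwkName(arr, len, CAM_EFFECT_MODE_OFF) returns ANDROID_CONTROL_EFFECT_MODE_OFF;
// an unmapped HAL value returns NAME_NOT_FOUND.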
6382
6383/*===========================================================================
6384 * FUNCTION : lookupHalName
6385 *
 6386 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 6387 * make sure the parameter is correctly propagated
6388 *
6389 * PARAMETERS :
6390 * @arr : map between the two enums
6391 * @len : len of the map
 6392 * @fwk_name : name of the fwk parameter to map
6393 *
6394 * RETURN : int32_t type of status
6395 * hal_name -- success
 6396 * non-zero failure code
6397 *==========================================================================*/
6398template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6399 size_t len, fwkType fwk_name)
6400{
6401 for (size_t i = 0; i < len; i++) {
6402 if (arr[i].fwk_name == fwk_name) {
6403 return arr[i].hal_name;
6404 }
6405 }
6406
6407 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6408 return NAME_NOT_FOUND;
6409}
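// Illustrative use (hypothetical map): lookupHalName(arr, len,
// ANDROID_CONTROL_EFFECT_MODE_OFF) performs the inverse mapping of lookupFwkName,
// returning the hal_name paired with the given fwk_name, or NAME_NOT_FOUND if unmapped.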
6410
6411/*===========================================================================
6412 * FUNCTION : lookupProp
6413 *
6414 * DESCRIPTION: lookup a value by its name
6415 *
6416 * PARAMETERS :
6417 * @arr : map between the two enums
6418 * @len : size of the map
6419 * @name : name to be looked up
6420 *
6421 * RETURN : Value if found
6422 * CAM_CDS_MODE_MAX if not found
6423 *==========================================================================*/
6424template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6425 size_t len, const char *name)
6426{
6427 if (name) {
6428 for (size_t i = 0; i < len; i++) {
6429 if (!strcmp(arr[i].desc, name)) {
6430 return arr[i].val;
6431 }
6432 }
6433 }
6434 return CAM_CDS_MODE_MAX;
6435}
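// Illustrative use (map name is hypothetical): lookupProp(CDS_MAP, len, "Auto")
// returns the val field of the entry whose desc matches "Auto", and
// CAM_CDS_MODE_MAX when the name is not found or is NULL.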
6436
6437/*===========================================================================
 6438 * FUNCTION   : translateFromHalMetadata
 6439 *
 * DESCRIPTION: Translate metadata reported by the HAL/backend into the
 *              camera_metadata_t result format expected by the framework
6440 *
6441 * PARAMETERS :
6442 * @metadata : metadata information from callback
6443 * @timestamp: metadata buffer timestamp
6444 * @request_id: request id
6445 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006446 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006447 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6448 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006449 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006450 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6451 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 *
6453 * RETURN : camera_metadata_t*
6454 * metadata in a format specified by fwk
6455 *==========================================================================*/
6456camera_metadata_t*
6457QCamera3HardwareInterface::translateFromHalMetadata(
6458 metadata_buffer_t *metadata,
6459 nsecs_t timestamp,
6460 int32_t request_id,
6461 const CameraMetadata& jpegMetadata,
6462 uint8_t pipeline_depth,
6463 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006464 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006465 /* DevCamDebug metadata translateFromHalMetadata argument */
6466 uint8_t DevCamDebug_meta_enable,
6467 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006468 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006469 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006470 bool lastMetadataInBatch,
6471 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006472{
6473 CameraMetadata camMetadata;
6474 camera_metadata_t *resultMetadata;
6475
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006476 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006477 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6478 * Timestamp is needed because it's used for shutter notify calculation.
6479 * */
6480 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6481 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006482 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006483 }
6484
Thierry Strudel3d639192016-09-09 11:52:26 -07006485 if (jpegMetadata.entryCount())
6486 camMetadata.append(jpegMetadata);
6487
6488 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6489 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6490 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6491 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006492 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006493 if (mBatchSize == 0) {
6494 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6495 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6496 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006497
Samuel Ha68ba5172016-12-15 18:41:12 -08006498 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6499 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6500 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6501 // DevCamDebug metadata translateFromHalMetadata AF
6502 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6503 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6504 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6505 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6506 }
6507 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6508 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6509 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6510 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6511 }
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6513 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6514 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6515 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6516 }
6517 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6518 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6519 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6520 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6521 }
6522 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6523 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6524 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6525 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6526 }
6527 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6528 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6529 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6530 *DevCamDebug_af_monitor_pdaf_target_pos;
6531 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6532 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6533 }
6534 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6535 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6536 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6537 *DevCamDebug_af_monitor_pdaf_confidence;
6538 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6539 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6540 }
6541 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6542 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6543 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6544 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6545 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6546 }
6547 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6548 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6549 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6550 *DevCamDebug_af_monitor_tof_target_pos;
6551 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6552 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6553 }
6554 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6555 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6556 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6557 *DevCamDebug_af_monitor_tof_confidence;
6558 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6559 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6560 }
6561 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6562 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6563 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6564 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6565 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6566 }
6567 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6568 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6569 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6570 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6571 &fwk_DevCamDebug_af_monitor_type_select, 1);
6572 }
6573 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6574 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6575 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6576 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6577 &fwk_DevCamDebug_af_monitor_refocus, 1);
6578 }
6579 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6580 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6581 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6582 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6583 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6584 }
6585 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6586 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6587 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6588 *DevCamDebug_af_search_pdaf_target_pos;
6589 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6590 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6591 }
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6593 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6594 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6595 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6596 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6597 }
6598 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6599 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6600 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6601 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6602 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6603 }
6604 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6605 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6606 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6607 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6608 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6609 }
6610 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6611 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6612 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6613 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6614 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6617 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6618 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6619 *DevCamDebug_af_search_tof_target_pos;
6620 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6621 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6622 }
6623 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6624 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6625 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6626 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6627 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6628 }
6629 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6630 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6631 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6632 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6633 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6634 }
6635 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6636 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6637 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6638 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6639 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6642 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6643 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6644 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6645 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6646 }
6647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6648 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6649 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6650 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6651 &fwk_DevCamDebug_af_search_type_select, 1);
6652 }
6653 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6654 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6655 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6656 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6657 &fwk_DevCamDebug_af_search_next_pos, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6660 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6661 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6662 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6663 &fwk_DevCamDebug_af_search_target_pos, 1);
6664 }
6665 // DevCamDebug metadata translateFromHalMetadata AEC
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6667 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6668 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6669 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6672 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6673 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6674 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6677 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6678 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6679 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6682 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6683 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6684 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6687 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6688 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6689 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6690 }
6691 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6692 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6693 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6694 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6697 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6698 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6699 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6700 }
6701 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6702 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6703 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6704 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6705 }
6706 // DevCamDebug metadata translateFromHalMetadata zzHDR
6707 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6708 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6709 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6710 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6711 }
6712 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6713 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6714 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6715 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6716 }
6717 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6718 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6719 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6720 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6723 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6724 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6725 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6726 }
6727 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6728 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6729 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6730 *DevCamDebug_aec_hdr_sensitivity_ratio;
6731 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6732 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6733 }
6734 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6735 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6736 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6737 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6738 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6739 }
6740 // DevCamDebug metadata translateFromHalMetadata ADRC
6741 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6742 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6743 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6744 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6745 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6746 }
6747 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6748 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6749 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6750 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6751 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6752 }
6753 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6754 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6755 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6756 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6757 }
6758 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6759 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6760 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6761 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6762 }
6763 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6764 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6765 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6766 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6767 }
6768 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6769 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6770 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6771 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6772 }
6773 // DevCamDebug metadata translateFromHalMetadata AWB
6774 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6775 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6776 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6777 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6778 }
6779 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6780 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6781 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6782 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6783 }
6784 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6785 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6786 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6787 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6788 }
6789 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6790 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6791 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6792 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6795 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6796 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6797 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6798 }
6799 }
6800 // atrace_end(ATRACE_TAG_ALWAYS);
6801
6802 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6803 int64_t fwk_frame_number = *frame_number;
6804 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6805 }
6806
6807 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6808 int32_t fps_range[2];
6809 fps_range[0] = (int32_t)float_range->min_fps;
6810 fps_range[1] = (int32_t)float_range->max_fps;
6811 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6812 fps_range, 2);
6813 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6814 fps_range[0], fps_range[1]);
6815 }
6816
6817 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6818 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6819 }
6820
6821 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6822 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6823 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6824 *sceneMode);
6825 if (NAME_NOT_FOUND != val) {
6826 uint8_t fwkSceneMode = (uint8_t)val;
6827 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6828 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6829 fwkSceneMode);
6830 }
6831 }
6832
6833 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6834 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6835 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6836 }
6837
6838 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6839 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6840 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6841 }
6842
6843 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6844 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6845 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6846 }
6847
6848 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6849 CAM_INTF_META_EDGE_MODE, metadata) {
6850 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6851 }
6852
6853 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6854 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6855 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6856 }
6857
6858 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6859 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6860 }
6861
6862 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6863 if (0 <= *flashState) {
6864 uint8_t fwk_flashState = (uint8_t) *flashState;
6865 if (!gCamCapability[mCameraId]->flash_available) {
6866 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6867 }
6868 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6869 }
6870 }
6871
6872 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6873 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6874 if (NAME_NOT_FOUND != val) {
6875 uint8_t fwk_flashMode = (uint8_t)val;
6876 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6877 }
6878 }
6879
6880 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6881 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6882 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6883 }
6884
6885 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6886 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6887 }
6888
6889 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6890 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6891 }
6892
6893 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6894 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6895 }
6896
6897 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6898 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6899 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6900 }
6901
6902 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6903 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6904 LOGD("fwk_videoStab = %d", fwk_videoStab);
6905 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6906 } else {
6907 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6908 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6909 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6910 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
6911 LOGD("EIS result default to OFF mode");
6912 }
6913
6914 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6915 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6916 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6917 }
6918
6919 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6920 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6921 }
6922
6923 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6924 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
6925 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
6926
6927 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6928 gCamCapability[mCameraId]->color_arrangement);
6929
6930 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
6931 blackLevelAppliedPattern->cam_black_level[0],
6932 blackLevelAppliedPattern->cam_black_level[1],
6933 blackLevelAppliedPattern->cam_black_level[2],
6934 blackLevelAppliedPattern->cam_black_level[3]);
6935 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6936 BLACK_LEVEL_PATTERN_CNT);
6937
6938#ifndef USE_HAL_3_3
6939 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6940 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
6941 // depth space, i.e. divide by 2^(14 - 10) = 16.
6942 fwk_blackLevelInd[0] /= 16.0;
6943 fwk_blackLevelInd[1] /= 16.0;
6944 fwk_blackLevelInd[2] /= 16.0;
6945 fwk_blackLevelInd[3] /= 16.0;
6946 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6947 BLACK_LEVEL_PATTERN_CNT);
6948#endif
6949 }
6950
6951#ifndef USE_HAL_3_3
6952 // Fixed whitelevel is used by ISP/Sensor
6953 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6954 &gCamCapability[mCameraId]->white_level, 1);
6955#endif
6956
6957 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6958 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6959 int32_t scalerCropRegion[4];
6960 scalerCropRegion[0] = hScalerCropRegion->left;
6961 scalerCropRegion[1] = hScalerCropRegion->top;
6962 scalerCropRegion[2] = hScalerCropRegion->width;
6963 scalerCropRegion[3] = hScalerCropRegion->height;
6964
6965 // Adjust crop region from sensor output coordinate system to active
6966 // array coordinate system.
6967 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6968 scalerCropRegion[2], scalerCropRegion[3]);
6969
6970 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6971 }
6972
6973 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6974 LOGD("sensorExpTime = %lld", *sensorExpTime);
6975 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6976 }
6977
6978 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6979 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6980 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6981 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6982 }
6983
6984 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6985 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6986 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6987 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6988 sensorRollingShutterSkew, 1);
6989 }
6990
6991 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6992 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6993 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6994
6995 //calculate the noise profile based on sensitivity
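    // ANDROID_SENSOR_NOISE_PROFILE carries one (S, O) pair per color channel; per the Android
    // metadata definition the pair models pixel noise variance as approximately S * signal + O.
    // The model is computed once here, so the same pair is replicated for every channel below.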
6996 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6997 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6998 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6999 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7000 noise_profile[i] = noise_profile_S;
7001 noise_profile[i+1] = noise_profile_O;
7002 }
7003 LOGD("noise model entry (S, O) is (%f, %f)",
7004 noise_profile_S, noise_profile_O);
7005 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7006 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7007 }
7008
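    // The boost published below folds the ISP digital gain together with any post-stats
    // scaling; the framework tag treats 100 as unity (no post-RAW boost), which is why the
    // default below is 100.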
7009#ifndef USE_HAL_3_3
7010 int32_t fwk_ispSensitivity = 100;
7011 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
7012 fwk_ispSensitivity = (int32_t) *ispSensitivity;
7013 }
7014 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7015 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7016 }
7017 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
7018#endif
7019
7020 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7021 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7022 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7023 }
7024
7025 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7026 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7027 *faceDetectMode);
7028 if (NAME_NOT_FOUND != val) {
7029 uint8_t fwk_faceDetectMode = (uint8_t)val;
7030 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7031
7032 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7033 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7034 CAM_INTF_META_FACE_DETECTION, metadata) {
7035 uint8_t numFaces = MIN(
7036 faceDetectionInfo->num_faces_detected, MAX_ROI);
7037 int32_t faceIds[MAX_ROI];
7038 uint8_t faceScores[MAX_ROI];
7039 int32_t faceRectangles[MAX_ROI * 4];
7040 int32_t faceLandmarks[MAX_ROI * 6];
7041 size_t j = 0, k = 0;
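                    // j walks faceRectangles in 4-entry (left, top, right, bottom) tuples per face;
                    // k walks faceLandmarks in TOTAL_LANDMARK_INDICES-entry tuples (both eyes and mouth).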
7042
7043 for (size_t i = 0; i < numFaces; i++) {
7044 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7045 // Map the face boundary from the sensor output coordinate system to the
7046 // active array coordinate system.
7047 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7048 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7049 rect.width, rect.height);
7050
7051 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7052 faceRectangles+j, -1);
7053
7054 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7055 "bottom-right (%d, %d)",
7056 faceDetectionInfo->frame_id, i,
7057 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7058 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7059
7060 j += 4;
7061 }
7062 if (numFaces <= 0) {
7063 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7064 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7065 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7066 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7067 }
7068
7069 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7070 numFaces);
7071 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7072 faceRectangles, numFaces * 4U);
7073 if (fwk_faceDetectMode ==
7074 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7075 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7076 CAM_INTF_META_FACE_LANDMARK, metadata) {
7077
7078 for (size_t i = 0; i < numFaces; i++) {
7079 // Map the landmark coordinates from the sensor output coordinate system
7080 // to the active array coordinate system.
7081 mCropRegionMapper.toActiveArray(
7082 landmarks->face_landmarks[i].left_eye_center.x,
7083 landmarks->face_landmarks[i].left_eye_center.y);
7084 mCropRegionMapper.toActiveArray(
7085 landmarks->face_landmarks[i].right_eye_center.x,
7086 landmarks->face_landmarks[i].right_eye_center.y);
7087 mCropRegionMapper.toActiveArray(
7088 landmarks->face_landmarks[i].mouth_center.x,
7089 landmarks->face_landmarks[i].mouth_center.y);
7090
7091 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
7092
7093 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7094 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7095 faceDetectionInfo->frame_id, i,
7096 faceLandmarks[k + LEFT_EYE_X],
7097 faceLandmarks[k + LEFT_EYE_Y],
7098 faceLandmarks[k + RIGHT_EYE_X],
7099 faceLandmarks[k + RIGHT_EYE_Y],
7100 faceLandmarks[k + MOUTH_X],
7101 faceLandmarks[k + MOUTH_Y]);
7102
7103 k += TOTAL_LANDMARK_INDICES;
7104 }
7105 } else {
7106 for (size_t i = 0; i < numFaces; i++) {
7107 setInvalidLandmarks(faceLandmarks+k);
7108 k+= TOTAL_LANDMARK_INDICES;
7109 }
7110 }
7111
7112 for (size_t i = 0; i < numFaces; i++) {
7113 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7114
7115 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7116 faceDetectionInfo->frame_id, i, faceIds[i]);
7117 }
7118
7119 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7120 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7121 faceLandmarks, numFaces * 6U);
7122 }
7123 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7124 CAM_INTF_META_FACE_BLINK, metadata) {
7125 uint8_t detected[MAX_ROI];
7126 uint8_t degree[MAX_ROI * 2];
7127 for (size_t i = 0; i < numFaces; i++) {
7128 detected[i] = blinks->blink[i].blink_detected;
7129 degree[2 * i] = blinks->blink[i].left_blink;
7130 degree[2 * i + 1] = blinks->blink[i].right_blink;
7131
7132 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7133 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7134 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7135 degree[2 * i + 1]);
7136 }
7137 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7138 detected, numFaces);
7139 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7140 degree, numFaces * 2);
7141 }
7142 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7143 CAM_INTF_META_FACE_SMILE, metadata) {
7144 uint8_t degree[MAX_ROI];
7145 uint8_t confidence[MAX_ROI];
7146 for (size_t i = 0; i < numFaces; i++) {
7147 degree[i] = smiles->smile[i].smile_degree;
7148 confidence[i] = smiles->smile[i].smile_confidence;
7149
7150 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7151 "smile_degree=%d, smile_score=%d",
7152 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
7153 }
7154 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7155 degree, numFaces);
7156 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7157 confidence, numFaces);
7158 }
7159 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7160 CAM_INTF_META_FACE_GAZE, metadata) {
7161 int8_t angle[MAX_ROI];
7162 int32_t direction[MAX_ROI * 3];
7163 int8_t degree[MAX_ROI * 2];
7164 for (size_t i = 0; i < numFaces; i++) {
7165 angle[i] = gazes->gaze[i].gaze_angle;
7166 direction[3 * i] = gazes->gaze[i].updown_dir;
7167 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7168 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7169 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7170 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7171
7172 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7173 "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7174 "left_right_gaze=%d, top_bottom_gaze=%d",
7175 faceDetectionInfo->frame_id, i, angle[i],
7176 direction[3 * i], direction[3 * i + 1],
7177 direction[3 * i + 2],
7178 degree[2 * i], degree[2 * i + 1]);
7179 }
7180 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7181 (uint8_t *)angle, numFaces);
7182 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7183 direction, numFaces * 3);
7184 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7185 (uint8_t *)degree, numFaces * 2);
7186 }
7187 }
7188 }
7189 }
7190 }
7191
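    // Histogram translation: when the histogram mode is ON and a bin count was reported,
    // one channel of the Bayer (or YUV) histogram is selected below and published through
    // the NEXUS_EXPERIMENTAL_2017_HISTOGRAM tag.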
7192 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7193 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
7194 int32_t histogramBins = 0;
7195 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
7196 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
7197
7198 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7199 histogramBins = *histBins;
7200 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7201 }
7202
7203 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
7204 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7205 // process histogram statistics info
7206 int32_t* histogramData = NULL;
7207
7208 switch (stats_data->type) {
7209 case CAM_HISTOGRAM_TYPE_BAYER:
7210 switch (stats_data->bayer_stats.data_type) {
7211 case CAM_STATS_CHANNEL_GR:
7212 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7213 break;
7214 case CAM_STATS_CHANNEL_GB:
7215 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7216 break;
7217 case CAM_STATS_CHANNEL_B:
7218 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7219 break;
7220 case CAM_STATS_CHANNEL_Y:
7221 case CAM_STATS_CHANNEL_ALL:
7222 case CAM_STATS_CHANNEL_R:
7223 default:
7224 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7225 break;
7226 }
7227 break;
7228 case CAM_HISTOGRAM_TYPE_YUV:
7229 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
7230 break;
7231 }
7232
7233 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
7234 }
7235 }
7236 }
7237
7238 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7239 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7240 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7241 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7242 }
7243
7244 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7245 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7246 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7247 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7248 }
7249
7250 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7251 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7252 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7253 CAM_MAX_SHADING_MAP_HEIGHT);
7254 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7255 CAM_MAX_SHADING_MAP_WIDTH);
7256 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7257 lensShadingMap->lens_shading, 4U * map_width * map_height);
7258 }
7259
7260 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7261 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7262 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7263 }
7264
7265 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7266 //Populate CAM_INTF_META_TONEMAP_CURVES
7267 /* ch0 = G, ch 1 = B, ch 2 = R*/
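        // Each channel's curve is stored as (Pin, Pout) pairs, so tonemap_points_cnt points
        // occupy tonemap_points_cnt * 2 floats in each framework entry below.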
7268 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7269 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7270 tonemap->tonemap_points_cnt,
7271 CAM_MAX_TONEMAP_CURVE_SIZE);
7272 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7273 }
7274
7275 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7276 &tonemap->curves[0].tonemap_points[0][0],
7277 tonemap->tonemap_points_cnt * 2);
7278
7279 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7280 &tonemap->curves[1].tonemap_points[0][0],
7281 tonemap->tonemap_points_cnt * 2);
7282
7283 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7284 &tonemap->curves[2].tonemap_points[0][0],
7285 tonemap->tonemap_points_cnt * 2);
7286 }
7287
7288 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7289 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7290 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7291 CC_GAIN_MAX);
7292 }
7293
7294 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7295 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7296 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7297 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7298 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7299 }
7300
7301 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7302 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7303 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7304 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7305 toneCurve->tonemap_points_cnt,
7306 CAM_MAX_TONEMAP_CURVE_SIZE);
7307 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7308 }
7309 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7310 (float*)toneCurve->curve.tonemap_points,
7311 toneCurve->tonemap_points_cnt * 2);
7312 }
7313
7314 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7315 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7316 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7317 predColorCorrectionGains->gains, 4);
7318 }
7319
7320 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7321 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7322 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7323 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7324 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7325 }
7326
7327 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7328 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7329 }
7330
7331 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7332 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7333 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7334 }
7335
7336 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7337 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7338 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7339 }
7340
7341 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7342 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7343 *effectMode);
7344 if (NAME_NOT_FOUND != val) {
7345 uint8_t fwk_effectMode = (uint8_t)val;
7346 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7347 }
7348 }
7349
7350 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7351 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7352 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7353 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7354 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7355 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7356 }
7357 int32_t fwk_testPatternData[4];
7358 fwk_testPatternData[0] = testPatternData->r;
7359 fwk_testPatternData[3] = testPatternData->b;
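        // Indices 0 and 3 of the framework array always carry R and B; indices 1 and 2 carry
        // the two green channels, ordered according to the sensor's CFA arrangement below.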
7360 switch (gCamCapability[mCameraId]->color_arrangement) {
7361 case CAM_FILTER_ARRANGEMENT_RGGB:
7362 case CAM_FILTER_ARRANGEMENT_GRBG:
7363 fwk_testPatternData[1] = testPatternData->gr;
7364 fwk_testPatternData[2] = testPatternData->gb;
7365 break;
7366 case CAM_FILTER_ARRANGEMENT_GBRG:
7367 case CAM_FILTER_ARRANGEMENT_BGGR:
7368 fwk_testPatternData[2] = testPatternData->gr;
7369 fwk_testPatternData[1] = testPatternData->gb;
7370 break;
7371 default:
7372 LOGE("color arrangement %d is not supported",
7373 gCamCapability[mCameraId]->color_arrangement);
7374 break;
7375 }
7376 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7377 }
7378
7379 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7380 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7381 }
7382
7383 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7384 String8 str((const char *)gps_methods);
7385 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7386 }
7387
7388 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7389 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7390 }
7391
7392 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7393 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7394 }
7395
7396 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7397 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7398 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7399 }
7400
7401 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7402 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7403 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7404 }
7405
7406 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7407 int32_t fwk_thumb_size[2];
7408 fwk_thumb_size[0] = thumb_size->width;
7409 fwk_thumb_size[1] = thumb_size->height;
7410 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7411 }
7412
7413 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7414 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7415 privateData,
7416 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7417 }
7418
7419 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
7420 camMetadata.update(QCAMERA3_EXPOSURE_METER,
7421 meteringMode, 1);
7422 }
7423
7424 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7425 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7426 LOGD("hdr_scene_data: %d %f\n",
7427 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7428 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7429 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7430 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7431 &isHdr, 1);
7432 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7433 &isHdrConfidence, 1);
7434 }
7435
7436
7437
7438 if (metadata->is_tuning_params_valid) {
7439 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7440 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7441 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
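        // Blob layout packed below: six uint32_t header fields (version plus the sensor, VFE,
        // CPP, CAC and mod3 data sizes) followed by the sensor, VFE, CPP and CAC payloads
        // copied back to back; 'data' tracks the current write position in the blob.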
7442
7443
7444 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7445 sizeof(uint32_t));
7446 data += sizeof(uint32_t);
7447
7448 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7449 sizeof(uint32_t));
7450 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7451 data += sizeof(uint32_t);
7452
7453 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7454 sizeof(uint32_t));
7455 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7456 data += sizeof(uint32_t);
7457
7458 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7459 sizeof(uint32_t));
7460 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7461 data += sizeof(uint32_t);
7462
7463 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7464 sizeof(uint32_t));
7465 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7466 data += sizeof(uint32_t);
7467
7468 metadata->tuning_params.tuning_mod3_data_size = 0;
7469 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7470 sizeof(uint32_t));
7471 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7472 data += sizeof(uint32_t);
7473
7474 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7475 TUNING_SENSOR_DATA_MAX);
7476 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7477 count);
7478 data += count;
7479
7480 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7481 TUNING_VFE_DATA_MAX);
7482 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7483 count);
7484 data += count;
7485
7486 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7487 TUNING_CPP_DATA_MAX);
7488 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7489 count);
7490 data += count;
7491
7492 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7493 TUNING_CAC_DATA_MAX);
7494 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7495 count);
7496 data += count;
7497
7498 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7499 (int32_t *)(void *)tuning_meta_data_blob,
7500 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7501 }
7502
7503 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7504 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7505 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7506 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7507 NEUTRAL_COL_POINTS);
7508 }
7509
7510 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7511 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7512 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7513 }
7514
7515 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7516 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7517 // Adjust crop region from sensor output coordinate system to active
7518 // array coordinate system.
7519 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7520 hAeRegions->rect.width, hAeRegions->rect.height);
7521
7522 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7523 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7524 REGIONS_TUPLE_COUNT);
7525 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7526 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7527 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7528 hAeRegions->rect.height);
7529 }
7530
7531 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7532 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7533 if (NAME_NOT_FOUND != val) {
7534 uint8_t fwkAfMode = (uint8_t)val;
7535 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7536 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7537 } else {
7538 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7539 val);
7540 }
7541 }
7542
7543 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7544 uint8_t fwk_afState = (uint8_t) *afState;
7545 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7546 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7547 }
7548
7549 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7550 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7551 }
7552
7553 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7554 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7555 }
7556
7557 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7558 uint8_t fwk_lensState = *lensState;
7559 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7560 }
7561
7562
7563 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
7564 uint32_t ab_mode = *hal_ab_mode;
7565 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7566 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7567 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7568 }
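        // CAM_ANTIBANDING_MODE_AUTO_50HZ/60HZ are HAL-internal refinements of AUTO; they are
        // collapsed to the generic AUTO value before mapping to the framework enum.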
7569 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7570 ab_mode);
7571 if (NAME_NOT_FOUND != val) {
7572 uint8_t fwk_ab_mode = (uint8_t)val;
7573 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7574 }
7575 }
7576
7577 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7578 int val = lookupFwkName(SCENE_MODES_MAP,
7579 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7580 if (NAME_NOT_FOUND != val) {
7581 uint8_t fwkBestshotMode = (uint8_t)val;
7582 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7583 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7584 } else {
7585 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7586 }
7587 }
7588
7589 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7590 uint8_t fwk_mode = (uint8_t) *mode;
7591 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7592 }
7593
7594 /* Constant metadata values to be updated */
7595 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7596 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7597
7598 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7599 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7600
7601 int32_t hotPixelMap[2];
7602 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7603
7604 // CDS
7605 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7606 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7607 }
7608
7609 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7610 int32_t fwk_hdr;
7611 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
7612 if (*vhdr == CAM_SENSOR_HDR_OFF) {
7613 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7614 } else {
7615 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7616 }
7617
7618 if(fwk_hdr != curr_hdr_state) {
7619 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7620 if(fwk_hdr)
7621 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7622 else
7623 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7624 }
7625 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7626 }
7627
7628 // Binning correction
7629 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7630 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7631 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7632 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7633 }
7634
7635 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
7636 int32_t fwk_ir = (int32_t) *ir;
7637 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
7638 int8_t is_ir_on = 0;
7639
7640 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7641 if(is_ir_on != curr_ir_state) {
7642 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7643 if(is_ir_on)
7644 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7645 else
7646 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7647 }
7648 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
7649 }
7650
7651 // AEC SPEED
7652 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7653 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7654 }
7655
7656 // AWB SPEED
7657 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7658 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7659 }
7660
7661 // TNR
7662 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7663 uint8_t tnr_enable = tnr->denoise_enable;
7664 int32_t tnr_process_type = (int32_t)tnr->process_plates;
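        // As with IR and video HDR above, mCurrFeatureState tracks whether SW TNR is currently
        // active so that a toggle can be logged exactly once when the state changes.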
7665 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
7666 int8_t is_tnr_on = 0;
7667
7668 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7669 if(is_tnr_on != curr_tnr_state) {
7670 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7671 if(is_tnr_on)
7672 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7673 else
7674 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7675 }
7676
7677 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7678 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7679 }
7680
7681 // Reprocess crop data
7682 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7683 uint8_t cnt = crop_data->num_of_streams;
7684 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7685 // mm-qcamera-daemon only posts crop_data for streams
7686 // not linked to pproc, so the absence of valid crop
7687 // metadata is not necessarily an error case.
7688 LOGD("No valid crop metadata entries");
7689 } else {
7690 uint32_t reproc_stream_id;
7691 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7692 LOGD("No reprocessible stream found, ignore crop data");
7693 } else {
7694 int rc = NO_ERROR;
7695 Vector<int32_t> roi_map;
7696 int32_t *crop = new int32_t[cnt*4];
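                // 'crop' receives one (left, top, width, height) tuple for the reprocessible
                // stream (or the full input dimensions if the HAL already reprocessed), and
                // roi_map carries the matching ROI-map rectangle reported via the vendor tags.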
7697 if (NULL == crop) {
7698 rc = NO_MEMORY;
7699 }
7700 if (NO_ERROR == rc) {
7701 int32_t streams_found = 0;
7702 for (size_t i = 0; i < cnt; i++) {
7703 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7704 if (pprocDone) {
7705 // HAL already does internal reprocessing,
7706 // either via reprocessing before JPEG encoding,
7707 // or offline postprocessing for pproc bypass case.
7708 crop[0] = 0;
7709 crop[1] = 0;
7710 crop[2] = mInputStreamInfo.dim.width;
7711 crop[3] = mInputStreamInfo.dim.height;
7712 } else {
7713 crop[0] = crop_data->crop_info[i].crop.left;
7714 crop[1] = crop_data->crop_info[i].crop.top;
7715 crop[2] = crop_data->crop_info[i].crop.width;
7716 crop[3] = crop_data->crop_info[i].crop.height;
7717 }
7718 roi_map.add(crop_data->crop_info[i].roi_map.left);
7719 roi_map.add(crop_data->crop_info[i].roi_map.top);
7720 roi_map.add(crop_data->crop_info[i].roi_map.width);
7721 roi_map.add(crop_data->crop_info[i].roi_map.height);
7722 streams_found++;
7723 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7724 crop[0], crop[1], crop[2], crop[3]);
7725 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7726 crop_data->crop_info[i].roi_map.left,
7727 crop_data->crop_info[i].roi_map.top,
7728 crop_data->crop_info[i].roi_map.width,
7729 crop_data->crop_info[i].roi_map.height);
7730 break;
7731
7732 }
7733 }
7734 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7735 &streams_found, 1);
7736 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7737 crop, (size_t)(streams_found * 4));
7738 if (roi_map.array()) {
7739 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7740 roi_map.array(), roi_map.size());
7741 }
7742 }
7743 if (crop) {
7744 delete [] crop;
7745 }
7746 }
7747 }
7748 }
7749
7750 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7751 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7752 // so hardcode the CAC result to OFF mode.
7753 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7754 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7755 } else {
7756 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7757 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7758 *cacMode);
7759 if (NAME_NOT_FOUND != val) {
7760 uint8_t resultCacMode = (uint8_t)val;
7761 // Check whether the CAC result from the callback matches the CAC mode set by the
7762 // framework; if not, report the CAC mode that came in the corresponding request.
7763 if (fwk_cacMode != resultCacMode) {
7764 resultCacMode = fwk_cacMode;
7765 }
7766 // Check if CAC is disabled by property
7767 if (m_cacModeDisabled) {
7768 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7769 }
7770
7771 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7772 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7773 } else {
7774 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7775 }
7776 }
7777 }
7778
7779 // Post blob of cam_cds_data through vendor tag.
7780 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7781 uint8_t cnt = cdsInfo->num_of_streams;
7782 cam_cds_data_t cdsDataOverride;
7783 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7784 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7785 cdsDataOverride.num_of_streams = 1;
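    // Only the CDS state of the reprocessible output stream is forwarded; the override blob
    // always advertises a single stream regardless of how many streams the HAL reported.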
7786 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7787 uint32_t reproc_stream_id;
7788 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7789 LOGD("No reprocessible stream found, ignore cds data");
7790 } else {
7791 for (size_t i = 0; i < cnt; i++) {
7792 if (cdsInfo->cds_info[i].stream_id ==
7793 reproc_stream_id) {
7794 cdsDataOverride.cds_info[0].cds_enable =
7795 cdsInfo->cds_info[i].cds_enable;
7796 break;
7797 }
7798 }
7799 }
7800 } else {
7801 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7802 }
7803 camMetadata.update(QCAMERA3_CDS_INFO,
7804 (uint8_t *)&cdsDataOverride,
7805 sizeof(cam_cds_data_t));
7806 }
7807
7808 // Ldaf calibration data
7809 if (!mLdafCalibExist) {
7810 IF_META_AVAILABLE(uint32_t, ldafCalib,
7811 CAM_INTF_META_LDAF_EXIF, metadata) {
7812 mLdafCalibExist = true;
7813 mLdafCalib[0] = ldafCalib[0];
7814 mLdafCalib[1] = ldafCalib[1];
7815 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7816 ldafCalib[0], ldafCalib[1]);
7817 }
7818 }
7819
Thierry Strudel54dc9782017-02-15 12:12:10 -08007820 // EXIF debug data through vendor tag
7821 /*
7822 * Mobicat Mask can assume 3 values:
7823 * 1 refers to Mobicat data,
7824 * 2 refers to Stats Debug and Exif Debug Data
7825 * 3 refers to Mobicat and Stats Debug Data
7826 * We want to make sure that we are sending Exif debug data
7827 * only when Mobicat Mask is 2.
7828 */
7829 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7830 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7831 (uint8_t *)(void *)mExifParams.debug_params,
7832 sizeof(mm_jpeg_debug_exif_params_t));
7833 }
7834
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007835 // Reprocess and DDM debug data through vendor tag
7836 cam_reprocess_info_t repro_info;
7837 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
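// Collect snapshot crop info (sensor/CAMIF/ISP/CPP), AF focal length ratio,
// pipeline flip, rotation, AF ROI and the dynamic feature mask into a single
// blob posted through QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB below.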
Thierry Strudel3d639192016-09-09 11:52:26 -07007838 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7839 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007840 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007841 }
7842 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7843 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007844 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007845 }
7846 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7847 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007848 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007849 }
7850 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7851 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007852 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007853 }
7854 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7855 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007856 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007857 }
7858 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007859 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007860 }
7861 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7862 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007863 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007864 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007865 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7866 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7867 }
7868 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7869 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7870 }
7871 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7872 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007873
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007874 // INSTANT AEC MODE
7875 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7876 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7877 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7878 }
7879
Shuzhen Wange763e802016-03-31 10:24:29 -07007880 // AF scene change
7881 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7882 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7883 }
7884
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007885 // Enable ZSL
7886 if (enableZsl != nullptr) {
7887 uint8_t value = *enableZsl ?
7888 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7889 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7890 }
7891
Thierry Strudel3d639192016-09-09 11:52:26 -07007892 resultMetadata = camMetadata.release();
7893 return resultMetadata;
7894}
7895
7896/*===========================================================================
7897 * FUNCTION : saveExifParams
7898 *
7899 * DESCRIPTION: Save 3A/stats EXIF debug parameters received in the metadata
7900 * callback into mExifParams
7900 *
7901 * PARAMETERS :
7902 * @metadata : metadata information from callback
7903 *
7904 * RETURN : none
7905 *
7906 *==========================================================================*/
7907void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7908{
7909 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7910 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7911 if (mExifParams.debug_params) {
7912 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7913 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7914 }
7915 }
7916 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7917 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7918 if (mExifParams.debug_params) {
7919 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7920 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7921 }
7922 }
7923 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7924 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7925 if (mExifParams.debug_params) {
7926 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7927 mExifParams.debug_params->af_debug_params_valid = TRUE;
7928 }
7929 }
7930 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7931 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7932 if (mExifParams.debug_params) {
7933 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7934 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7935 }
7936 }
7937 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7938 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7939 if (mExifParams.debug_params) {
7940 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7941 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7942 }
7943 }
7944 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7945 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7946 if (mExifParams.debug_params) {
7947 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7948 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7949 }
7950 }
7951 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7952 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7953 if (mExifParams.debug_params) {
7954 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7955 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7956 }
7957 }
7958 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7959 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7960 if (mExifParams.debug_params) {
7961 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7962 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7963 }
7964 }
7965}
7966
7967/*===========================================================================
7968 * FUNCTION : get3AExifParams
7969 *
7970 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
7971 *
7972 * PARAMETERS : none
7973 *
7974 *
7975 * RETURN : mm_jpeg_exif_params_t
7976 *
7977 *==========================================================================*/
7978mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7979{
7980 return mExifParams;
7981}
7982
7983/*===========================================================================
7984 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7985 *
7986 * DESCRIPTION: Translate urgent (partial) metadata from the backend callback
7987 * into framework result metadata
7987 *
7988 * PARAMETERS :
7989 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007990 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7991 * urgent metadata in a batch. Always true for
7992 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007993 *
7994 * RETURN : camera_metadata_t*
7995 * metadata in a format specified by fwk
7996 *==========================================================================*/
7997camera_metadata_t*
7998QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007999 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008000{
8001 CameraMetadata camMetadata;
8002 camera_metadata_t *resultMetadata;
8003
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008004 if (!lastUrgentMetadataInBatch) {
8005 /* In batch mode, use empty metadata if this is not the last in batch
8006 */
8007 resultMetadata = allocate_camera_metadata(0, 0);
8008 return resultMetadata;
8009 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008010
8011 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8012 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8013 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8014 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8015 }
8016
8017 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8018 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8019 &aecTrigger->trigger, 1);
8020 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8021 &aecTrigger->trigger_id, 1);
8022 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8023 aecTrigger->trigger);
8024 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8025 aecTrigger->trigger_id);
8026 }
8027
8028 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8029 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8030 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8031 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8032 }
8033
Thierry Strudel3d639192016-09-09 11:52:26 -07008034 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8035 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8036 &af_trigger->trigger, 1);
8037 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8038 af_trigger->trigger);
8039 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8040 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8041 af_trigger->trigger_id);
8042 }
8043
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008044 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8045 /*af regions*/
8046 int32_t afRegions[REGIONS_TUPLE_COUNT];
8047 // Adjust crop region from sensor output coordinate system to active
8048 // array coordinate system.
8049 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8050 hAfRegions->rect.width, hAfRegions->rect.height);
8051
8052 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8053 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8054 REGIONS_TUPLE_COUNT);
8055 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8056 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8057 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8058 hAfRegions->rect.height);
8059 }
8060
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008061 // AF region confidence
8062 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8063 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8064 }
8065
Thierry Strudel3d639192016-09-09 11:52:26 -07008066 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8067 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8068 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8069 if (NAME_NOT_FOUND != val) {
8070 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8071 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8072 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8073 } else {
8074 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8075 }
8076 }
8077
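// Deduce ANDROID_CONTROL_AE_MODE from the backend values: red-eye reduction
// takes precedence, then the LED/flash mode, then the plain AE mode.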
8078 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8079 uint32_t aeMode = CAM_AE_MODE_MAX;
8080 int32_t flashMode = CAM_FLASH_MODE_MAX;
8081 int32_t redeye = -1;
8082 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8083 aeMode = *pAeMode;
8084 }
8085 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8086 flashMode = *pFlashMode;
8087 }
8088 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8089 redeye = *pRedeye;
8090 }
8091
8092 if (1 == redeye) {
8093 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8094 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8095 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8096 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8097 flashMode);
8098 if (NAME_NOT_FOUND != val) {
8099 fwk_aeMode = (uint8_t)val;
8100 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8101 } else {
8102 LOGE("Unsupported flash mode %d", flashMode);
8103 }
8104 } else if (aeMode == CAM_AE_MODE_ON) {
8105 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8106 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8107 } else if (aeMode == CAM_AE_MODE_OFF) {
8108 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8109 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008110 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8111 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8112 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008113 } else {
8114 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8115 "flashMode:%d, aeMode:%u!!!",
8116 redeye, flashMode, aeMode);
8117 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008118 if (mInstantAEC) {
8119 // Increment frame index count until a bound is reached for instant AEC.
8120 mInstantAecFrameIdxCount++;
8121 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8122 CAM_INTF_META_AEC_INFO, metadata) {
8123 LOGH("ae_params->settled = %d",ae_params->settled);
8124 // If AEC settled, or if number of frames reached bound value,
8125 // should reset instant AEC.
8126 if (ae_params->settled ||
8127 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8128 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8129 mInstantAEC = false;
8130 mResetInstantAEC = true;
8131 mInstantAecFrameIdxCount = 0;
8132 }
8133 }
8134 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008135 resultMetadata = camMetadata.release();
8136 return resultMetadata;
8137}
8138
8139/*===========================================================================
8140 * FUNCTION : dumpMetadataToFile
8141 *
8142 * DESCRIPTION: Dumps tuning metadata to file system
8143 *
8144 * PARAMETERS :
8145 * @meta : tuning metadata
8146 * @dumpFrameCount : current dump frame count
8147 * @enabled : Enable mask
8148 *
8149 *==========================================================================*/
8150void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8151 uint32_t &dumpFrameCount,
8152 bool enabled,
8153 const char *type,
8154 uint32_t frameNumber)
8155{
8156 //Some sanity checks
8157 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8158 LOGE("Tuning sensor data size bigger than expected %d: %d",
8159 meta.tuning_sensor_data_size,
8160 TUNING_SENSOR_DATA_MAX);
8161 return;
8162 }
8163
8164 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8165 LOGE("Tuning VFE data size bigger than expected %d: %d",
8166 meta.tuning_vfe_data_size,
8167 TUNING_VFE_DATA_MAX);
8168 return;
8169 }
8170
8171 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8172 LOGE("Tuning CPP data size bigger than expected %d: %d",
8173 meta.tuning_cpp_data_size,
8174 TUNING_CPP_DATA_MAX);
8175 return;
8176 }
8177
8178 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8179 LOGE("Tuning CAC data size bigger than expected %d: %d",
8180 meta.tuning_cac_data_size,
8181 TUNING_CAC_DATA_MAX);
8182 return;
8183 }
8184 //
8185
8186 if(enabled){
8187 char timeBuf[FILENAME_MAX];
8188 char buf[FILENAME_MAX];
8189 memset(buf, 0, sizeof(buf));
8190 memset(timeBuf, 0, sizeof(timeBuf));
8191 time_t current_time;
8192 struct tm * timeinfo;
8193 time (&current_time);
8194 timeinfo = localtime (&current_time);
8195 if (timeinfo != NULL) {
8196 strftime (timeBuf, sizeof(timeBuf),
8197 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8198 }
8199 String8 filePath(timeBuf);
8200 snprintf(buf,
8201 sizeof(buf),
8202 "%dm_%s_%d.bin",
8203 dumpFrameCount,
8204 type,
8205 frameNumber);
8206 filePath.append(buf);
8207 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8208 if (file_fd >= 0) {
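// Dump layout produced by the writes below:
//   [u32 version][u32 sensor_size][u32 vfe_size][u32 cpp_size]
//   [u32 cac_size][u32 mod3_size(=0)] followed by the sensor, VFE,
//   CPP and CAC payloads written back-to-back with those lengths.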
8209 ssize_t written_len = 0;
8210 meta.tuning_data_version = TUNING_DATA_VERSION;
8211 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8212 written_len += write(file_fd, data, sizeof(uint32_t));
8213 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8214 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8215 written_len += write(file_fd, data, sizeof(uint32_t));
8216 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8217 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8218 written_len += write(file_fd, data, sizeof(uint32_t));
8219 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8220 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8221 written_len += write(file_fd, data, sizeof(uint32_t));
8222 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8223 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8224 written_len += write(file_fd, data, sizeof(uint32_t));
8225 meta.tuning_mod3_data_size = 0;
8226 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8227 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8228 written_len += write(file_fd, data, sizeof(uint32_t));
8229 size_t total_size = meta.tuning_sensor_data_size;
8230 data = (void *)((uint8_t *)&meta.data);
8231 written_len += write(file_fd, data, total_size);
8232 total_size = meta.tuning_vfe_data_size;
8233 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8234 written_len += write(file_fd, data, total_size);
8235 total_size = meta.tuning_cpp_data_size;
8236 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8237 written_len += write(file_fd, data, total_size);
8238 total_size = meta.tuning_cac_data_size;
8239 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8240 written_len += write(file_fd, data, total_size);
8241 close(file_fd);
8242 }else {
8243 LOGE("fail to open file for metadata dumping");
8244 }
8245 }
8246}
8247
8248/*===========================================================================
8249 * FUNCTION : cleanAndSortStreamInfo
8250 *
8251 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8252 * and sort them so that raw streams are at the end of the list.
8253 * This is a workaround for a camera daemon constraint.
8254 *
8255 * PARAMETERS : None
8256 *
8257 *==========================================================================*/
8258void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8259{
8260 List<stream_info_t *> newStreamInfo;
8261
8262 /*clean up invalid streams*/
8263 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8264 it != mStreamInfo.end();) {
8265 if(((*it)->status) == INVALID){
8266 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8267 delete channel;
8268 free(*it);
8269 it = mStreamInfo.erase(it);
8270 } else {
8271 it++;
8272 }
8273 }
8274
8275 // Move preview/video/callback/snapshot streams into newList
8276 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8277 it != mStreamInfo.end();) {
8278 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8279 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8280 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8281 newStreamInfo.push_back(*it);
8282 it = mStreamInfo.erase(it);
8283 } else
8284 it++;
8285 }
8286 // Move raw streams into newList
8287 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8288 it != mStreamInfo.end();) {
8289 newStreamInfo.push_back(*it);
8290 it = mStreamInfo.erase(it);
8291 }
8292
8293 mStreamInfo = newStreamInfo;
8294}
8295
8296/*===========================================================================
8297 * FUNCTION : extractJpegMetadata
8298 *
8299 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8300 * JPEG metadata is cached in HAL, and return as part of capture
8301 * result when metadata is returned from camera daemon.
8302 *
8303 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8304 * @request: capture request
8305 *
8306 *==========================================================================*/
8307void QCamera3HardwareInterface::extractJpegMetadata(
8308 CameraMetadata& jpegMetadata,
8309 const camera3_capture_request_t *request)
8310{
8311 CameraMetadata frame_settings;
8312 frame_settings = request->settings;
8313
8314 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8315 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8316 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8317 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8318
8319 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8320 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8321 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8322 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8323
8324 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8325 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8326 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8327 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8328
8329 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8330 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8331 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8332 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8333
8334 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8335 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8336 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8337 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8338
8339 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8340 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8341 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8342 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8343
8344 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8345 int32_t thumbnail_size[2];
8346 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8347 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8348 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8349 int32_t orientation =
8350 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008351 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008352 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8353 int32_t temp;
8354 temp = thumbnail_size[0];
8355 thumbnail_size[0] = thumbnail_size[1];
8356 thumbnail_size[1] = temp;
8357 }
8358 }
8359 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8360 thumbnail_size,
8361 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8362 }
8363
8364}
8365
8366/*===========================================================================
8367 * FUNCTION : convertToRegions
8368 *
8369 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8370 *
8371 * PARAMETERS :
8372 * @rect : cam_rect_t struct to convert
8373 * @region : int32_t destination array
8374 * @weight : if we are converting from cam_area_t, weight is valid
8375 * else weight = -1
8376 *
8377 *==========================================================================*/
8378void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8379 int32_t *region, int weight)
8380{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008381 region[FACE_LEFT] = rect.left;
8382 region[FACE_TOP] = rect.top;
8383 region[FACE_RIGHT] = rect.left + rect.width;
8384 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008385 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008386 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008387 }
8388}
8389
8390/*===========================================================================
8391 * FUNCTION : convertFromRegions
8392 *
8393 * DESCRIPTION: helper method to convert a metadata region array to cam_area_t
8394 *
8395 * PARAMETERS :
8396 * @roi : cam_area_t destination (rect plus weight)
8397 * @frame_settings : capture request settings containing the region tag
8398 * @tag : metadata tag whose data is the five-element array
8399 * [xMin, yMin, xMax, yMax, weight]
8400 *
8401 *==========================================================================*/
8402void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008403 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008404{
Thierry Strudel3d639192016-09-09 11:52:26 -07008405 int32_t x_min = frame_settings.find(tag).data.i32[0];
8406 int32_t y_min = frame_settings.find(tag).data.i32[1];
8407 int32_t x_max = frame_settings.find(tag).data.i32[2];
8408 int32_t y_max = frame_settings.find(tag).data.i32[3];
8409 roi.weight = frame_settings.find(tag).data.i32[4];
8410 roi.rect.left = x_min;
8411 roi.rect.top = y_min;
8412 roi.rect.width = x_max - x_min;
8413 roi.rect.height = y_max - y_min;
8414}
8415
8416/*===========================================================================
8417 * FUNCTION : resetIfNeededROI
8418 *
8419 * DESCRIPTION: helper method to clamp the roi to the scaler crop region,
8420 * or reject it if it lies completely outside that region
8421 *
8422 * PARAMETERS :
8423 * @roi : cam_area_t struct to resize
8424 * @scalerCropRegion : cam_crop_region_t region to compare against
8425 *
8426 *
8427 *==========================================================================*/
8428bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8429 const cam_crop_region_t* scalerCropRegion)
8430{
8431 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8432 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8433 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8434 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8435
8436 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8437 * Without this check, the calculations below that validate whether the roi is
8438 * inside the scaler crop region would fail, leaving the roi unreset and
8439 * causing the algorithm to keep using a stale roi window.
8440 */
8441 if (roi->weight == 0) {
8442 return true;
8443 }
8444
8445 if ((roi_x_max < scalerCropRegion->left) ||
8446 // right edge of roi window is left of scaler crop's left edge
8447 (roi_y_max < scalerCropRegion->top) ||
8448 // bottom edge of roi window is above scaler crop's top edge
8449 (roi->rect.left > crop_x_max) ||
8450 // left edge of roi window is beyond (right of) scaler crop's right edge
8451 (roi->rect.top > crop_y_max)){
8452 // top edge of roi window is beyond (below) scaler crop's bottom edge
8453 return false;
8454 }
8455 if (roi->rect.left < scalerCropRegion->left) {
8456 roi->rect.left = scalerCropRegion->left;
8457 }
8458 if (roi->rect.top < scalerCropRegion->top) {
8459 roi->rect.top = scalerCropRegion->top;
8460 }
8461 if (roi_x_max > crop_x_max) {
8462 roi_x_max = crop_x_max;
8463 }
8464 if (roi_y_max > crop_y_max) {
8465 roi_y_max = crop_y_max;
8466 }
8467 roi->rect.width = roi_x_max - roi->rect.left;
8468 roi->rect.height = roi_y_max - roi->rect.top;
8469 return true;
8470}
8471
8472/*===========================================================================
8473 * FUNCTION : convertLandmarks
8474 *
8475 * DESCRIPTION: helper method to extract the landmarks from face detection info
8476 *
8477 * PARAMETERS :
8478 * @landmark_data : input landmark data to be converted
8479 * @landmarks : int32_t destination array
8480 *
8481 *
8482 *==========================================================================*/
8483void QCamera3HardwareInterface::convertLandmarks(
8484 cam_face_landmarks_info_t landmark_data,
8485 int32_t *landmarks)
8486{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008487 if (landmark_data.is_left_eye_valid) {
8488 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8489 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8490 } else {
8491 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8492 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8493 }
8494
8495 if (landmark_data.is_right_eye_valid) {
8496 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8497 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8498 } else {
8499 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8500 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8501 }
8502
8503 if (landmark_data.is_mouth_valid) {
8504 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8505 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8506 } else {
8507 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8508 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8509 }
8510}
8511
8512/*===========================================================================
8513 * FUNCTION : setInvalidLandmarks
8514 *
8515 * DESCRIPTION: helper method to set invalid landmarks
8516 *
8517 * PARAMETERS :
8518 * @landmarks : int32_t destination array
8519 *
8520 *
8521 *==========================================================================*/
8522void QCamera3HardwareInterface::setInvalidLandmarks(
8523 int32_t *landmarks)
8524{
8525 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8526 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8527 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8528 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8529 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8530 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008531}
8532
8533#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008534
8535/*===========================================================================
8536 * FUNCTION : getCapabilities
8537 *
8538 * DESCRIPTION: query camera capability from back-end
8539 *
8540 * PARAMETERS :
8541 * @ops : mm-interface ops structure
8542 * @cam_handle : camera handle for which we need capability
8543 *
8544 * RETURN : ptr type of capability structure
8545 * capability for success
8546 * NULL for failure
8547 *==========================================================================*/
8548cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8549 uint32_t cam_handle)
8550{
8551 int rc = NO_ERROR;
8552 QCamera3HeapMemory *capabilityHeap = NULL;
8553 cam_capability_t *cap_ptr = NULL;
8554
8555 if (ops == NULL) {
8556 LOGE("Invalid arguments");
8557 return NULL;
8558 }
8559
8560 capabilityHeap = new QCamera3HeapMemory(1);
8561 if (capabilityHeap == NULL) {
8562 LOGE("creation of capabilityHeap failed");
8563 return NULL;
8564 }
8565
8566 /* Allocate memory for capability buffer */
8567 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8568 if(rc != OK) {
8569 LOGE("No memory for cappability");
8570 goto allocate_failed;
8571 }
8572
8573 /* Map memory for capability buffer */
8574 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8575
8576 rc = ops->map_buf(cam_handle,
8577 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8578 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8579 if(rc < 0) {
8580 LOGE("failed to map capability buffer");
8581 rc = FAILED_TRANSACTION;
8582 goto map_failed;
8583 }
8584
8585 /* Query Capability */
8586 rc = ops->query_capability(cam_handle);
8587 if(rc < 0) {
8588 LOGE("failed to query capability");
8589 rc = FAILED_TRANSACTION;
8590 goto query_failed;
8591 }
8592
8593 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8594 if (cap_ptr == NULL) {
8595 LOGE("out of memory");
8596 rc = NO_MEMORY;
8597 goto query_failed;
8598 }
8599
8600 memset(cap_ptr, 0, sizeof(cam_capability_t));
8601 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8602
8603 int index;
8604 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8605 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8606 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8607 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8608 }
8609
8610query_failed:
8611 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8612map_failed:
8613 capabilityHeap->deallocate();
8614allocate_failed:
8615 delete capabilityHeap;
8616
8617 if (rc != NO_ERROR) {
8618 return NULL;
8619 } else {
8620 return cap_ptr;
8621 }
8622}
8623
Thierry Strudel3d639192016-09-09 11:52:26 -07008624/*===========================================================================
8625 * FUNCTION : initCapabilities
8626 *
8627 * DESCRIPTION: initialize camera capabilities in static data struct
8628 *
8629 * PARAMETERS :
8630 * @cameraId : camera Id
8631 *
8632 * RETURN : int32_t type of status
8633 * NO_ERROR -- success
8634 * none-zero failure code
8635 *==========================================================================*/
8636int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8637{
8638 int rc = 0;
8639 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008640 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008641
8642 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8643 if (rc) {
8644 LOGE("camera_open failed. rc = %d", rc);
8645 goto open_failed;
8646 }
8647 if (!cameraHandle) {
8648 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8649 goto open_failed;
8650 }
8651
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008652 handle = get_main_camera_handle(cameraHandle->camera_handle);
8653 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8654 if (gCamCapability[cameraId] == NULL) {
8655 rc = FAILED_TRANSACTION;
8656 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008657 }
8658
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008659 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008660 if (is_dual_camera_by_idx(cameraId)) {
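// For dual-camera sensors also query the aux camera capability, and keep a
// copy of the main capability alongside it in main_cam_cap.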
8661 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8662 gCamCapability[cameraId]->aux_cam_cap =
8663 getCapabilities(cameraHandle->ops, handle);
8664 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8665 rc = FAILED_TRANSACTION;
8666 free(gCamCapability[cameraId]);
8667 goto failed_op;
8668 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008669
8670 // Copy the main camera capability to main_cam_cap struct
8671 gCamCapability[cameraId]->main_cam_cap =
8672 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8673 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8674 LOGE("out of memory");
8675 rc = NO_MEMORY;
8676 goto failed_op;
8677 }
8678 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8679 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008680 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008681failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008682 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8683 cameraHandle = NULL;
8684open_failed:
8685 return rc;
8686}
8687
8688/*==========================================================================
8689 * FUNCTION : get3Aversion
8690 *
8691 * DESCRIPTION: get the Q3A S/W version
8692 *
8693 * PARAMETERS :
8694 * @sw_version: Reference of Q3A structure which will hold version info upon
8695 * return
8696 *
8697 * RETURN : None
8698 *
8699 *==========================================================================*/
8700void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8701{
8702 if(gCamCapability[mCameraId])
8703 sw_version = gCamCapability[mCameraId]->q3a_version;
8704 else
8705 LOGE("Capability structure NULL!");
8706}
8707
8708
8709/*===========================================================================
8710 * FUNCTION : initParameters
8711 *
8712 * DESCRIPTION: initialize camera parameters
8713 *
8714 * PARAMETERS :
8715 *
8716 * RETURN : int32_t type of status
8717 * NO_ERROR -- success
8718 * none-zero failure code
8719 *==========================================================================*/
8720int QCamera3HardwareInterface::initParameters()
8721{
8722 int rc = 0;
8723
8724 //Allocate Set Param Buffer
8725 mParamHeap = new QCamera3HeapMemory(1);
8726 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8727 if(rc != OK) {
8728 rc = NO_MEMORY;
8729 LOGE("Failed to allocate SETPARM Heap memory");
8730 delete mParamHeap;
8731 mParamHeap = NULL;
8732 return rc;
8733 }
8734
8735 //Map memory for parameters buffer
8736 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8737 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8738 mParamHeap->getFd(0),
8739 sizeof(metadata_buffer_t),
8740 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8741 if(rc < 0) {
8742 LOGE("failed to map SETPARM buffer");
8743 rc = FAILED_TRANSACTION;
8744 mParamHeap->deallocate();
8745 delete mParamHeap;
8746 mParamHeap = NULL;
8747 return rc;
8748 }
8749
8750 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8751
8752 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8753 return rc;
8754}
8755
8756/*===========================================================================
8757 * FUNCTION : deinitParameters
8758 *
8759 * DESCRIPTION: de-initialize camera parameters
8760 *
8761 * PARAMETERS :
8762 *
8763 * RETURN : NONE
8764 *==========================================================================*/
8765void QCamera3HardwareInterface::deinitParameters()
8766{
8767 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8768 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8769
8770 mParamHeap->deallocate();
8771 delete mParamHeap;
8772 mParamHeap = NULL;
8773
8774 mParameters = NULL;
8775
8776 free(mPrevParameters);
8777 mPrevParameters = NULL;
8778}
8779
8780/*===========================================================================
8781 * FUNCTION : calcMaxJpegSize
8782 *
8783 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8784 *
8785 * PARAMETERS :
8786 *
8787 * RETURN : max_jpeg_size
8788 *==========================================================================*/
8789size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8790{
8791 size_t max_jpeg_size = 0;
8792 size_t temp_width, temp_height;
8793 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8794 MAX_SIZES_CNT);
8795 for (size_t i = 0; i < count; i++) {
8796 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8797 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8798 if (temp_width * temp_height > max_jpeg_size ) {
8799 max_jpeg_size = temp_width * temp_height;
8800 }
8801 }
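// Worst-case JPEG size estimate: 1.5x the largest picture pixel count plus
// the trailing camera3_jpeg_blob_t transport header.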
8802 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8803 return max_jpeg_size;
8804}
8805
8806/*===========================================================================
8807 * FUNCTION : getMaxRawSize
8808 *
8809 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8810 *
8811 * PARAMETERS :
8812 *
8813 * RETURN : Largest supported Raw Dimension
8814 *==========================================================================*/
8815cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8816{
8817 int max_width = 0;
8818 cam_dimension_t maxRawSize;
8819
8820 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8821 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8822 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8823 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8824 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8825 }
8826 }
8827 return maxRawSize;
8828}
8829
8830
8831/*===========================================================================
8832 * FUNCTION : calcMaxJpegDim
8833 *
8834 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8835 *
8836 * PARAMETERS :
8837 *
8838 * RETURN : max_jpeg_dim
8839 *==========================================================================*/
8840cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8841{
8842 cam_dimension_t max_jpeg_dim;
8843 cam_dimension_t curr_jpeg_dim;
8844 max_jpeg_dim.width = 0;
8845 max_jpeg_dim.height = 0;
8846 curr_jpeg_dim.width = 0;
8847 curr_jpeg_dim.height = 0;
8848 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8849 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8850 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8851 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8852 max_jpeg_dim.width * max_jpeg_dim.height ) {
8853 max_jpeg_dim.width = curr_jpeg_dim.width;
8854 max_jpeg_dim.height = curr_jpeg_dim.height;
8855 }
8856 }
8857 return max_jpeg_dim;
8858}
8859
8860/*===========================================================================
8861 * FUNCTION : addStreamConfig
8862 *
8863 * DESCRIPTION: adds the stream configuration to the array
8864 *
8865 * PARAMETERS :
8866 * @available_stream_configs : pointer to stream configuration array
8867 * @scalar_format : scalar format
8868 * @dim : configuration dimension
8869 * @config_type : input or output configuration type
8870 *
8871 * RETURN : NONE
8872 *==========================================================================*/
8873void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8874 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8875{
8876 available_stream_configs.add(scalar_format);
8877 available_stream_configs.add(dim.width);
8878 available_stream_configs.add(dim.height);
8879 available_stream_configs.add(config_type);
8880}
8881
8882/*===========================================================================
8883 * FUNCTION : supportBurstCapture
8884 *
8885 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8886 *
8887 * PARAMETERS :
8888 * @cameraId : camera Id
8889 *
8890 * RETURN : true if camera supports BURST_CAPTURE
8891 * false otherwise
8892 *==========================================================================*/
8893bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8894{
8895 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8896 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8897 const int32_t highResWidth = 3264;
8898 const int32_t highResHeight = 2448;
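// 3264x2448 (~8 MP) is used here as the "high resolution" bound: BURST_CAPTURE
// is reported only when some >= 8 MP size (or the max size itself) sustains
// roughly 20 fps while the max size sustains at least roughly 10 fps.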
8899
8900 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8901 // Maximum resolution images cannot be captured at >= 10fps
8902 // -> not supporting BURST_CAPTURE
8903 return false;
8904 }
8905
8906 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8907 // Maximum resolution images can be captured at >= 20fps
8908 // --> supporting BURST_CAPTURE
8909 return true;
8910 }
8911
8912 // Find the smallest highRes resolution, or largest resolution if there is none
8913 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8914 MAX_SIZES_CNT);
8915 size_t highRes = 0;
8916 while ((highRes + 1 < totalCnt) &&
8917 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8918 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8919 highResWidth * highResHeight)) {
8920 highRes++;
8921 }
8922 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8923 return true;
8924 } else {
8925 return false;
8926 }
8927}
8928
8929/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008930 * FUNCTION : getPDStatIndex
8931 *
8932 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8933 *
8934 * PARAMETERS :
8935 * @caps : camera capabilities
8936 *
8937 * RETURN : int32_t type
8938 * non-negative - on success
8939 * -1 - on failure
8940 *==========================================================================*/
8941int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8942 if (nullptr == caps) {
8943 return -1;
8944 }
8945
8946 uint32_t metaRawCount = caps->meta_raw_channel_count;
8947 int32_t ret = -1;
8948 for (size_t i = 0; i < metaRawCount; i++) {
8949 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8950 ret = i;
8951 break;
8952 }
8953 }
8954
8955 return ret;
8956}
8957
8958/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008959 * FUNCTION : initStaticMetadata
8960 *
8961 * DESCRIPTION: initialize the static metadata
8962 *
8963 * PARAMETERS :
8964 * @cameraId : camera Id
8965 *
8966 * RETURN : int32_t type of status
8967 * 0 -- success
8968 * non-zero failure code
8969 *==========================================================================*/
8970int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8971{
8972 int rc = 0;
8973 CameraMetadata staticInfo;
8974 size_t count = 0;
8975 bool limitedDevice = false;
8976 char prop[PROPERTY_VALUE_MAX];
8977 bool supportBurst = false;
8978
8979 supportBurst = supportBurstCapture(cameraId);
8980
8981 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
8982 * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
8983 * advertised as a LIMITED device */
8984 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8985 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8986 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8987 !supportBurst;
8988
8989 uint8_t supportedHwLvl = limitedDevice ?
8990 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008991#ifndef USE_HAL_3_3
8992 // LEVEL_3 - This device will support level 3.
8993 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8994#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008995 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008996#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008997
8998 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8999 &supportedHwLvl, 1);
9000
9001 bool facingBack = false;
9002 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9003 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9004 facingBack = true;
9005 }
9006 /*HAL 3 only*/
9007 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9008 &gCamCapability[cameraId]->min_focus_distance, 1);
9009
9010 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9011 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9012
9013 /*should be using focal lengths but sensor doesn't provide that info now*/
9014 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9015 &gCamCapability[cameraId]->focal_length,
9016 1);
9017
9018 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9019 gCamCapability[cameraId]->apertures,
9020 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9021
9022 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9023 gCamCapability[cameraId]->filter_densities,
9024 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9025
9026
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009027 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9028 size_t mode_count =
9029 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9030 for (size_t i = 0; i < mode_count; i++) {
9031 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009033 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009034 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009035
9036 int32_t lens_shading_map_size[] = {
9037 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9038 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9039 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9040 lens_shading_map_size,
9041 sizeof(lens_shading_map_size)/sizeof(int32_t));
9042
9043 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9044 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9045
9046 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9047 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9048
9049 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9050 &gCamCapability[cameraId]->max_frame_duration, 1);
9051
9052 camera_metadata_rational baseGainFactor = {
9053 gCamCapability[cameraId]->base_gain_factor.numerator,
9054 gCamCapability[cameraId]->base_gain_factor.denominator};
9055 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9056 &baseGainFactor, 1);
9057
9058 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9059 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9060
9061 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9062 gCamCapability[cameraId]->pixel_array_size.height};
9063 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9064 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9065
9066 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9067 gCamCapability[cameraId]->active_array_size.top,
9068 gCamCapability[cameraId]->active_array_size.width,
9069 gCamCapability[cameraId]->active_array_size.height};
9070 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9071 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9072
9073 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9074 &gCamCapability[cameraId]->white_level, 1);
9075
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009076 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9077 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9078 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009079 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009080 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009081
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009082#ifndef USE_HAL_3_3
9083 bool hasBlackRegions = false;
9084 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9085 LOGW("black_region_count: %d is bounded to %d",
9086 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9087 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9088 }
9089 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9090 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9091 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9092 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9093 }
9094 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9095 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9096 hasBlackRegions = true;
9097 }
9098#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009099 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9100 &gCamCapability[cameraId]->flash_charge_duration, 1);
9101
9102 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9103 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9104
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009105 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9106 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9107 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009108 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9109 &timestampSource, 1);
9110
Thierry Strudel54dc9782017-02-15 12:12:10 -08009111 //update histogram vendor data
9112 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009113 &gCamCapability[cameraId]->histogram_size, 1);
9114
Thierry Strudel54dc9782017-02-15 12:12:10 -08009115 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009116 &gCamCapability[cameraId]->max_histogram_count, 1);
9117
Shuzhen Wang14415f52016-11-16 18:26:18 -08009118 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9119 //so that the app can request fewer bins than the maximum supported.
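// For example (illustrative only): a maximum of 256 bins advertises
// {256, 128, 64, ...} down to MIN_CAM_HISTOGRAM_STATS_SIZE.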
9120 std::vector<int32_t> histBins;
9121 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9122 histBins.push_back(maxHistBins);
9123 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9124 (maxHistBins & 0x1) == 0) {
9125 histBins.push_back(maxHistBins >> 1);
9126 maxHistBins >>= 1;
9127 }
9128 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9129 histBins.data(), histBins.size());
9130
Thierry Strudel3d639192016-09-09 11:52:26 -07009131 int32_t sharpness_map_size[] = {
9132 gCamCapability[cameraId]->sharpness_map_size.width,
9133 gCamCapability[cameraId]->sharpness_map_size.height};
9134
9135 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9136 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9137
9138 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9139 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9140
Emilian Peev0f3c3162017-03-15 12:57:46 +00009141 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9142 if (0 <= indexPD) {
9143 // Advertise PD stats data as part of the Depth capabilities
9144 int32_t depthWidth =
9145 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9146 int32_t depthHeight =
9147 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9148 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9149 assert(0 < depthSamplesCount);
9150 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9151 &depthSamplesCount, 1);
9152
9153 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9154 depthHeight,
9155 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9156 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9157 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9158 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9159 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9160
9161 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9162 depthHeight, 33333333,
9163 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9164 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9165 depthMinDuration,
9166 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9167
9168 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9169 depthHeight, 0,
9170 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9171 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9172 depthStallDuration,
9173 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9174
9175 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9176 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9177 }
9178
Thierry Strudel3d639192016-09-09 11:52:26 -07009179 int32_t scalar_formats[] = {
9180 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9181 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9182 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9183 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9184 HAL_PIXEL_FORMAT_RAW10,
9185 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009186 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9187 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9188 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009189
9190 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9191 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9192 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9193 count, MAX_SIZES_CNT, available_processed_sizes);
9194 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9195 available_processed_sizes, count * 2);
9196
9197 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9198 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9199 makeTable(gCamCapability[cameraId]->raw_dim,
9200 count, MAX_SIZES_CNT, available_raw_sizes);
9201 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9202 available_raw_sizes, count * 2);
9203
9204 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9205 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9206 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9207 count, MAX_SIZES_CNT, available_fps_ranges);
9208 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9209 available_fps_ranges, count * 2);
9210
9211 camera_metadata_rational exposureCompensationStep = {
9212 gCamCapability[cameraId]->exp_compensation_step.numerator,
9213 gCamCapability[cameraId]->exp_compensation_step.denominator};
9214 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9215 &exposureCompensationStep, 1);
9216
9217 Vector<uint8_t> availableVstabModes;
9218 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9219 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009220 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009221 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009222 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009223 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009224 count = IS_TYPE_MAX;
9225 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9226 for (size_t i = 0; i < count; i++) {
9227 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9228 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9229 eisSupported = true;
9230 break;
9231 }
9232 }
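    // Advertise VIDEO_STABILIZATION_MODE_ON only for the back camera, and only when
    // EIS is enabled via the persist property and the sensor supports EIS 2.0 or 3.0.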
9233 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009234 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9235 }
9236 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9237 availableVstabModes.array(), availableVstabModes.size());
9238
9239 /*HAL 1 and HAL 3 common*/
9240 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9241 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9242 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
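    // zoom_ratio_tbl entries are scaled by 100 (HAL1/API1 convention), so dividing
    // by minZoomStep converts the last table entry into the maximum digital zoom ratio.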
Zhijun He2a5df222017-04-04 18:20:38 -07009243 // Cap the max zoom to the max preferred value
9244 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009245 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9246 &maxZoom, 1);
9247
9248 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9249 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9250
9251 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9252 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9253 max3aRegions[2] = 0; /* AF not supported */
9254 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9255 max3aRegions, 3);
9256
9257 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9258 memset(prop, 0, sizeof(prop));
9259 property_get("persist.camera.facedetect", prop, "1");
9260 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9261 LOGD("Support face detection mode: %d",
9262 supportedFaceDetectMode);
9263
9264 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009265     /* supported mode should be OFF if the max number of faces is 0 */
9266 if (maxFaces <= 0) {
9267 supportedFaceDetectMode = 0;
9268 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009269 Vector<uint8_t> availableFaceDetectModes;
9270 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9271 if (supportedFaceDetectMode == 1) {
9272 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9273 } else if (supportedFaceDetectMode == 2) {
9274 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9275 } else if (supportedFaceDetectMode == 3) {
9276 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9277 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9278 } else {
9279 maxFaces = 0;
9280 }
9281 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9282 availableFaceDetectModes.array(),
9283 availableFaceDetectModes.size());
9284 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9285 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009286 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9287 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9288 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009289
9290 int32_t exposureCompensationRange[] = {
9291 gCamCapability[cameraId]->exposure_compensation_min,
9292 gCamCapability[cameraId]->exposure_compensation_max};
9293 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9294 exposureCompensationRange,
9295 sizeof(exposureCompensationRange)/sizeof(int32_t));
9296
9297 uint8_t lensFacing = (facingBack) ?
9298 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9299 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9300
9301 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9302 available_thumbnail_sizes,
9303 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9304
9305     /* all picture sizes will be combined under this tag */
9306 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9307 /*android.scaler.availableStreamConfigurations*/
9308 Vector<int32_t> available_stream_configs;
9309 cam_dimension_t active_array_dim;
9310 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9311 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009312
9313     /*advertise the list of supported input dimensions based on the property below.
9314     By default all sizes up to 5MP will be advertised.
9315     Note that the setprop resolution format should be WxH,
9316     e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9317     To list all supported sizes, set the property to "0x0" */
9318 cam_dimension_t minInputSize = {2592,1944}; //5MP
9319 memset(prop, 0, sizeof(prop));
9320 property_get("persist.camera.input.minsize", prop, "2592x1944");
9321 if (strlen(prop) > 0) {
9322 char *saveptr = NULL;
9323 char *token = strtok_r(prop, "x", &saveptr);
9324 if (token != NULL) {
9325 minInputSize.width = atoi(token);
9326 }
9327 token = strtok_r(NULL, "x", &saveptr);
9328 if (token != NULL) {
9329 minInputSize.height = atoi(token);
9330 }
9331 }
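    // The parsed minimum input size is used below to decide which picture sizes are
    // also advertised as input (reprocess) stream configurations.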
9332
Thierry Strudel3d639192016-09-09 11:52:26 -07009333     /* Add input/output stream configurations for each scalar format */
9334 for (size_t j = 0; j < scalar_formats_count; j++) {
9335 switch (scalar_formats[j]) {
9336 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9337 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9338 case HAL_PIXEL_FORMAT_RAW10:
9339 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9340 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9341 addStreamConfig(available_stream_configs, scalar_formats[j],
9342 gCamCapability[cameraId]->raw_dim[i],
9343 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9344 }
9345 break;
9346 case HAL_PIXEL_FORMAT_BLOB:
9347 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9348 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9349 addStreamConfig(available_stream_configs, scalar_formats[j],
9350 gCamCapability[cameraId]->picture_sizes_tbl[i],
9351 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9352 }
9353 break;
9354 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9355 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9356 default:
9357 cam_dimension_t largest_picture_size;
9358 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9359 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9360 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9361 addStreamConfig(available_stream_configs, scalar_formats[j],
9362 gCamCapability[cameraId]->picture_sizes_tbl[i],
9363 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009364                 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009365 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9366 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009367 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9368 >= minInputSize.width) || (gCamCapability[cameraId]->
9369 picture_sizes_tbl[i].height >= minInputSize.height)) {
9370 addStreamConfig(available_stream_configs, scalar_formats[j],
9371 gCamCapability[cameraId]->picture_sizes_tbl[i],
9372 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9373 }
9374 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009375 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009376
Thierry Strudel3d639192016-09-09 11:52:26 -07009377 break;
9378 }
9379 }
9380
9381 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9382 available_stream_configs.array(), available_stream_configs.size());
9383 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9384 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9385
9386 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9387 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9388
9389 /* android.scaler.availableMinFrameDurations */
9390 Vector<int64_t> available_min_durations;
9391 for (size_t j = 0; j < scalar_formats_count; j++) {
9392 switch (scalar_formats[j]) {
9393 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9394 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9395 case HAL_PIXEL_FORMAT_RAW10:
9396 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9397 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9398 available_min_durations.add(scalar_formats[j]);
9399 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9400 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9401 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9402 }
9403 break;
9404 default:
9405 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9406 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9407 available_min_durations.add(scalar_formats[j]);
9408 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9409 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9410 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9411 }
9412 break;
9413 }
9414 }
9415 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9416 available_min_durations.array(), available_min_durations.size());
9417
9418 Vector<int32_t> available_hfr_configs;
9419 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9420 int32_t fps = 0;
9421 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9422 case CAM_HFR_MODE_60FPS:
9423 fps = 60;
9424 break;
9425 case CAM_HFR_MODE_90FPS:
9426 fps = 90;
9427 break;
9428 case CAM_HFR_MODE_120FPS:
9429 fps = 120;
9430 break;
9431 case CAM_HFR_MODE_150FPS:
9432 fps = 150;
9433 break;
9434 case CAM_HFR_MODE_180FPS:
9435 fps = 180;
9436 break;
9437 case CAM_HFR_MODE_210FPS:
9438 fps = 210;
9439 break;
9440 case CAM_HFR_MODE_240FPS:
9441 fps = 240;
9442 break;
9443 case CAM_HFR_MODE_480FPS:
9444 fps = 480;
9445 break;
9446 case CAM_HFR_MODE_OFF:
9447 case CAM_HFR_MODE_MAX:
9448 default:
9449 break;
9450 }
9451
9452 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9453 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9454 /* For each HFR frame rate, need to advertise one variable fps range
9455 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9456 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9457 * set by the app. When video recording is started, [120, 120] is
9458 * set. This way sensor configuration does not change when recording
9459 * is started */
9460
9461 /* (width, height, fps_min, fps_max, batch_size_max) */
9462 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9463 j < MAX_SIZES_CNT; j++) {
9464 available_hfr_configs.add(
9465 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9466 available_hfr_configs.add(
9467 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9468 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9469 available_hfr_configs.add(fps);
9470 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9471
9472 /* (width, height, fps_min, fps_max, batch_size_max) */
9473 available_hfr_configs.add(
9474 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9475 available_hfr_configs.add(
9476 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9477 available_hfr_configs.add(fps);
9478 available_hfr_configs.add(fps);
9479 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9480 }
9481 }
9482 }
9483 //Advertise HFR capability only if the property is set
9484 memset(prop, 0, sizeof(prop));
9485 property_get("persist.camera.hal3hfr.enable", prop, "1");
9486 uint8_t hfrEnable = (uint8_t)atoi(prop);
9487
9488 if(hfrEnable && available_hfr_configs.array()) {
9489 staticInfo.update(
9490 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9491 available_hfr_configs.array(), available_hfr_configs.size());
9492 }
9493
9494 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9495 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9496 &max_jpeg_size, 1);
9497
9498 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9499 size_t size = 0;
9500 count = CAM_EFFECT_MODE_MAX;
9501 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9502 for (size_t i = 0; i < count; i++) {
9503 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9504 gCamCapability[cameraId]->supported_effects[i]);
9505 if (NAME_NOT_FOUND != val) {
9506 avail_effects[size] = (uint8_t)val;
9507 size++;
9508 }
9509 }
9510 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9511 avail_effects,
9512 size);
9513
9514 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9515 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9516 size_t supported_scene_modes_cnt = 0;
9517 count = CAM_SCENE_MODE_MAX;
9518 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9519 for (size_t i = 0; i < count; i++) {
9520 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9521 CAM_SCENE_MODE_OFF) {
9522 int val = lookupFwkName(SCENE_MODES_MAP,
9523 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9524 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009525
Thierry Strudel3d639192016-09-09 11:52:26 -07009526 if (NAME_NOT_FOUND != val) {
9527 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9528 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9529 supported_scene_modes_cnt++;
9530 }
9531 }
9532 }
9533 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9534 avail_scene_modes,
9535 supported_scene_modes_cnt);
9536
9537 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9538 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9539 supported_scene_modes_cnt,
9540 CAM_SCENE_MODE_MAX,
9541 scene_mode_overrides,
9542 supported_indexes,
9543 cameraId);
9544
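    // If the backend reports no scene modes other than OFF, advertise only
    // SCENE_MODE_DISABLED so the tag is never left empty.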
9545 if (supported_scene_modes_cnt == 0) {
9546 supported_scene_modes_cnt = 1;
9547 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9548 }
9549
9550 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9551 scene_mode_overrides, supported_scene_modes_cnt * 3);
9552
9553 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9554 ANDROID_CONTROL_MODE_AUTO,
9555 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9556 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9557 available_control_modes,
9558 3);
9559
9560 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9561 size = 0;
9562 count = CAM_ANTIBANDING_MODE_MAX;
9563 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9564 for (size_t i = 0; i < count; i++) {
9565 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9566 gCamCapability[cameraId]->supported_antibandings[i]);
9567 if (NAME_NOT_FOUND != val) {
9568 avail_antibanding_modes[size] = (uint8_t)val;
9569 size++;
9570 }
9571
9572 }
9573 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9574 avail_antibanding_modes,
9575 size);
9576
9577 uint8_t avail_abberation_modes[] = {
9578 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9579 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9580 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9581 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9582 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9583 if (0 == count) {
9584         // If no aberration correction modes are available for a device, advertise only the OFF mode
9585 size = 1;
9586 } else {
9587         // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9588         // so advertise all 3 modes if at least one mode is supported, as per the
9589         // Android M requirement
9590 size = 3;
9591 }
9592 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9593 avail_abberation_modes,
9594 size);
9595
9596 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9597 size = 0;
9598 count = CAM_FOCUS_MODE_MAX;
9599 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9600 for (size_t i = 0; i < count; i++) {
9601 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9602 gCamCapability[cameraId]->supported_focus_modes[i]);
9603 if (NAME_NOT_FOUND != val) {
9604 avail_af_modes[size] = (uint8_t)val;
9605 size++;
9606 }
9607 }
9608 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9609 avail_af_modes,
9610 size);
9611
9612 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9613 size = 0;
9614 count = CAM_WB_MODE_MAX;
9615 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9616 for (size_t i = 0; i < count; i++) {
9617 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9618 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9619 gCamCapability[cameraId]->supported_white_balances[i]);
9620 if (NAME_NOT_FOUND != val) {
9621 avail_awb_modes[size] = (uint8_t)val;
9622 size++;
9623 }
9624 }
9625 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9626 avail_awb_modes,
9627 size);
9628
9629 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9630 count = CAM_FLASH_FIRING_LEVEL_MAX;
9631 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9632 count);
9633 for (size_t i = 0; i < count; i++) {
9634 available_flash_levels[i] =
9635 gCamCapability[cameraId]->supported_firing_levels[i];
9636 }
9637 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9638 available_flash_levels, count);
9639
9640 uint8_t flashAvailable;
9641 if (gCamCapability[cameraId]->flash_available)
9642 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9643 else
9644 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9645 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9646 &flashAvailable, 1);
9647
9648 Vector<uint8_t> avail_ae_modes;
9649 count = CAM_AE_MODE_MAX;
9650 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9651 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009652 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9653 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9654 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9655 }
9656 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009657 }
9658 if (flashAvailable) {
9659 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9660 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9661 }
9662 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9663 avail_ae_modes.array(),
9664 avail_ae_modes.size());
9665
9666 int32_t sensitivity_range[2];
9667 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9668 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9669 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9670 sensitivity_range,
9671 sizeof(sensitivity_range) / sizeof(int32_t));
9672
9673 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9674 &gCamCapability[cameraId]->max_analog_sensitivity,
9675 1);
9676
9677 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9678 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9679 &sensor_orientation,
9680 1);
9681
9682 int32_t max_output_streams[] = {
9683 MAX_STALLING_STREAMS,
9684 MAX_PROCESSED_STREAMS,
9685 MAX_RAW_STREAMS};
9686 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9687 max_output_streams,
9688 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9689
9690 uint8_t avail_leds = 0;
9691 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9692 &avail_leds, 0);
9693
9694 uint8_t focus_dist_calibrated;
9695 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9696 gCamCapability[cameraId]->focus_dist_calibrated);
9697 if (NAME_NOT_FOUND != val) {
9698 focus_dist_calibrated = (uint8_t)val;
9699 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9700 &focus_dist_calibrated, 1);
9701 }
9702
9703 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9704 size = 0;
9705 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9706 MAX_TEST_PATTERN_CNT);
9707 for (size_t i = 0; i < count; i++) {
9708 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9709 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9710 if (NAME_NOT_FOUND != testpatternMode) {
9711 avail_testpattern_modes[size] = testpatternMode;
9712 size++;
9713 }
9714 }
9715 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9716 avail_testpattern_modes,
9717 size);
9718
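    // Maximum pipeline depth reported to the framework: the number of in-flight
    // requests plus the empty pipeline and frame skip delays.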
9719 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9720 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9721 &max_pipeline_depth,
9722 1);
9723
9724 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9725 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9726 &partial_result_count,
9727 1);
9728
9729 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9730 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9731
9732 Vector<uint8_t> available_capabilities;
9733 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9734 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9735 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9736 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9737 if (supportBurst) {
9738 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9739 }
9740 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9741 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9742 if (hfrEnable && available_hfr_configs.array()) {
9743 available_capabilities.add(
9744 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9745 }
9746
9747 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9748 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9749 }
9750 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9751 available_capabilities.array(),
9752 available_capabilities.size());
9753
9754     //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9755     //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9756 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9757 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9758
9759 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9760 &aeLockAvailable, 1);
9761
9762     //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9763     //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9764 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9765 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9766
9767 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9768 &awbLockAvailable, 1);
9769
9770 int32_t max_input_streams = 1;
9771 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9772 &max_input_streams,
9773 1);
9774
9775 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9776 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9777 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9778 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9779 HAL_PIXEL_FORMAT_YCbCr_420_888};
9780 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9781 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9782
9783 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9784 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9785 &max_latency,
9786 1);
9787
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009788#ifndef USE_HAL_3_3
9789 int32_t isp_sensitivity_range[2];
9790 isp_sensitivity_range[0] =
9791 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9792 isp_sensitivity_range[1] =
9793 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9794 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9795 isp_sensitivity_range,
9796 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9797#endif
9798
Thierry Strudel3d639192016-09-09 11:52:26 -07009799 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9800 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9801 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9802 available_hot_pixel_modes,
9803 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9804
9805 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9806 ANDROID_SHADING_MODE_FAST,
9807 ANDROID_SHADING_MODE_HIGH_QUALITY};
9808 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9809 available_shading_modes,
9810 3);
9811
9812 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9813 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9814 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9815 available_lens_shading_map_modes,
9816 2);
9817
9818 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9819 ANDROID_EDGE_MODE_FAST,
9820 ANDROID_EDGE_MODE_HIGH_QUALITY,
9821 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9822 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9823 available_edge_modes,
9824 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9825
9826 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9827 ANDROID_NOISE_REDUCTION_MODE_FAST,
9828 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9829 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9830 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9831 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9832 available_noise_red_modes,
9833 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9834
9835 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9836 ANDROID_TONEMAP_MODE_FAST,
9837 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9838 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9839 available_tonemap_modes,
9840 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9841
9842 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9843 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9844 available_hot_pixel_map_modes,
9845 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9846
9847 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9848 gCamCapability[cameraId]->reference_illuminant1);
9849 if (NAME_NOT_FOUND != val) {
9850 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9851 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9852 }
9853
9854 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9855 gCamCapability[cameraId]->reference_illuminant2);
9856 if (NAME_NOT_FOUND != val) {
9857 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9858 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9859 }
9860
9861 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9862 (void *)gCamCapability[cameraId]->forward_matrix1,
9863 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9864
9865 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9866 (void *)gCamCapability[cameraId]->forward_matrix2,
9867 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9868
9869 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9870 (void *)gCamCapability[cameraId]->color_transform1,
9871 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9872
9873 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9874 (void *)gCamCapability[cameraId]->color_transform2,
9875 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9876
9877 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9878 (void *)gCamCapability[cameraId]->calibration_transform1,
9879 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9880
9881 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9882 (void *)gCamCapability[cameraId]->calibration_transform2,
9883 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9884
9885 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9886 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9887 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9888 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9889 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9890 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9891 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9892 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9893 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9894 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9895 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9896 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9897 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9898 ANDROID_JPEG_GPS_COORDINATES,
9899 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9900 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9901 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9902 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9903 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9904 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9905 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9906 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9907 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9908 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009909#ifndef USE_HAL_3_3
9910 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9911#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009912 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009913 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009914 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9915 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009916 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009917 /* DevCamDebug metadata request_keys_basic */
9918 DEVCAMDEBUG_META_ENABLE,
9919 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009920 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009921 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009922 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009923 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009924 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009925
9926 size_t request_keys_cnt =
9927 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9928 Vector<int32_t> available_request_keys;
9929 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9930 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9931 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9932 }
9933
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009934 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00009935 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009936 }
9937
Thierry Strudel3d639192016-09-09 11:52:26 -07009938 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9939 available_request_keys.array(), available_request_keys.size());
9940
9941 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9942 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9943 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9944 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9945 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9946 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9947 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9948 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9949 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9950 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9951 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9952 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9953 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9954 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9955 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9956 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9957 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009958 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009959 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9960 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9961 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009962 ANDROID_STATISTICS_FACE_SCORES,
9963#ifndef USE_HAL_3_3
9964 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9965#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009966 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009967 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009968 // DevCamDebug metadata result_keys_basic
9969 DEVCAMDEBUG_META_ENABLE,
9970 // DevCamDebug metadata result_keys AF
9971 DEVCAMDEBUG_AF_LENS_POSITION,
9972 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9973 DEVCAMDEBUG_AF_TOF_DISTANCE,
9974 DEVCAMDEBUG_AF_LUMA,
9975 DEVCAMDEBUG_AF_HAF_STATE,
9976 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9977 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9978 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9979 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9980 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9981 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9982 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9983 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9984 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9985 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9986 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9987 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9988 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9989 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9990 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9991 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9992 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9993 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9994 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9995 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9996 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9997 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9998 // DevCamDebug metadata result_keys AEC
9999 DEVCAMDEBUG_AEC_TARGET_LUMA,
10000 DEVCAMDEBUG_AEC_COMP_LUMA,
10001 DEVCAMDEBUG_AEC_AVG_LUMA,
10002 DEVCAMDEBUG_AEC_CUR_LUMA,
10003 DEVCAMDEBUG_AEC_LINECOUNT,
10004 DEVCAMDEBUG_AEC_REAL_GAIN,
10005 DEVCAMDEBUG_AEC_EXP_INDEX,
10006 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010007 // DevCamDebug metadata result_keys zzHDR
10008 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10009 DEVCAMDEBUG_AEC_L_LINECOUNT,
10010 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10011 DEVCAMDEBUG_AEC_S_LINECOUNT,
10012 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10013 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10014 // DevCamDebug metadata result_keys ADRC
10015 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10016 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10017 DEVCAMDEBUG_AEC_GTM_RATIO,
10018 DEVCAMDEBUG_AEC_LTM_RATIO,
10019 DEVCAMDEBUG_AEC_LA_RATIO,
10020 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010021 // DevCamDebug metadata result_keys AWB
10022 DEVCAMDEBUG_AWB_R_GAIN,
10023 DEVCAMDEBUG_AWB_G_GAIN,
10024 DEVCAMDEBUG_AWB_B_GAIN,
10025 DEVCAMDEBUG_AWB_CCT,
10026 DEVCAMDEBUG_AWB_DECISION,
10027 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010028 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10029 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10030 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010031 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010032 };
10033
Thierry Strudel3d639192016-09-09 11:52:26 -070010034 size_t result_keys_cnt =
10035 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10036
10037 Vector<int32_t> available_result_keys;
10038 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10039 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10040 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10041 }
10042 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10043 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10044 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10045 }
10046 if (supportedFaceDetectMode == 1) {
10047 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10048 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10049 } else if ((supportedFaceDetectMode == 2) ||
10050 (supportedFaceDetectMode == 3)) {
10051 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10052 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10053 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010054#ifndef USE_HAL_3_3
10055 if (hasBlackRegions) {
10056 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10057 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10058 }
10059#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010060
10061 if (gExposeEnableZslKey) {
10062 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10063 }
10064
Thierry Strudel3d639192016-09-09 11:52:26 -070010065 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10066 available_result_keys.array(), available_result_keys.size());
10067
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010068 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010069 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10070 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10071 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10072 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10073 ANDROID_SCALER_CROPPING_TYPE,
10074 ANDROID_SYNC_MAX_LATENCY,
10075 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10076 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10077 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10078 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10079 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10080 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10081 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10082 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10083 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10084 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10085 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10086 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10087 ANDROID_LENS_FACING,
10088 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10089 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10090 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10091 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10092 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10093 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10094 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10095 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10096 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10097 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10098 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10099 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10100 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10101 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10102 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10103 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10104 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10105 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10106 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10107 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010108 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010109 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10110 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10111 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10112 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10113 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10114 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10115 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10116 ANDROID_CONTROL_AVAILABLE_MODES,
10117 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10118 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10119 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10120 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010121 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10122#ifndef USE_HAL_3_3
10123 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10124 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10125#endif
10126 };
10127
10128 Vector<int32_t> available_characteristics_keys;
10129 available_characteristics_keys.appendArray(characteristics_keys_basic,
10130 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10131#ifndef USE_HAL_3_3
10132 if (hasBlackRegions) {
10133 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10134 }
10135#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010136
10137 if (0 <= indexPD) {
10138 int32_t depthKeys[] = {
10139 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10140 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10141 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10142 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10143 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10144 };
10145 available_characteristics_keys.appendArray(depthKeys,
10146 sizeof(depthKeys) / sizeof(depthKeys[0]));
10147 }
10148
Thierry Strudel3d639192016-09-09 11:52:26 -070010149 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010150 available_characteristics_keys.array(),
10151 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010152
10153 /*available stall durations depend on the hw + sw and will be different for different devices */
10154 /*have to add for raw after implementation*/
10155 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10156 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10157
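    // Each stall duration entry below is a (format, width, height, stall duration)
    // tuple, mirroring the stream configuration layout; durations are in nanoseconds
    // per the Android metadata spec.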
10158 Vector<int64_t> available_stall_durations;
10159 for (uint32_t j = 0; j < stall_formats_count; j++) {
10160 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10161 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10162 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10163 available_stall_durations.add(stall_formats[j]);
10164 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10165 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10166 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10167 }
10168 } else {
10169 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10170 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10171 available_stall_durations.add(stall_formats[j]);
10172 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10173 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10174 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10175 }
10176 }
10177 }
10178 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10179 available_stall_durations.array(),
10180 available_stall_durations.size());
10181
10182 //QCAMERA3_OPAQUE_RAW
10183 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10184 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
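    // Pick the opaque RAW pixel format from the sensor white level (8-, 10- or
    // 12-bit) and from whether the backend reports legacy QCOM or MIPI packed RAW.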
10185 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10186 case LEGACY_RAW:
10187 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10188 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10189 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10190 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10191 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10192 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10193 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10194 break;
10195 case MIPI_RAW:
10196 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10197 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10198 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10199 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10200 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10201 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10202 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10203 break;
10204 default:
10205 LOGE("unknown opaque_raw_format %d",
10206 gCamCapability[cameraId]->opaque_raw_fmt);
10207 break;
10208 }
10209 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10210
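    // QCAMERA3_OPAQUE_RAW_STRIDES is a list of (width, height, stride) triplets; the
    // stride comes from the plane layout computed for the opaque RAW format above.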
10211 Vector<int32_t> strides;
10212 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10213 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10214 cam_stream_buf_plane_info_t buf_planes;
10215 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10216 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10217 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10218 &gCamCapability[cameraId]->padding_info, &buf_planes);
10219 strides.add(buf_planes.plane_info.mp[0].stride);
10220 }
10221 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10222 strides.size());
10223
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010224 //TBD: remove the following line once backend advertises zzHDR in feature mask
10225 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010226     //Advertise available video HDR modes when the backend supports any HDR feature
10227 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10228 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010229 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010230 int32_t vhdr_mode[] = {
10231 QCAMERA3_VIDEO_HDR_MODE_OFF,
10232 QCAMERA3_VIDEO_HDR_MODE_ON};
10233
10234 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10235 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10236 vhdr_mode, vhdr_mode_count);
10237 }
10238
Thierry Strudel3d639192016-09-09 11:52:26 -070010239 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10240 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10241 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10242
10243 uint8_t isMonoOnly =
10244 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10245 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10246 &isMonoOnly, 1);
10247
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010248#ifndef USE_HAL_3_3
10249 Vector<int32_t> opaque_size;
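    // ANDROID_SENSOR_OPAQUE_RAW_SIZE entries are (width, height, frame length)
    // triplets derived from the calculated RAW plane info.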
10250 for (size_t j = 0; j < scalar_formats_count; j++) {
10251 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10252 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10253 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10254 cam_stream_buf_plane_info_t buf_planes;
10255
10256 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10257 &gCamCapability[cameraId]->padding_info, &buf_planes);
10258
10259 if (rc == 0) {
10260 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10261 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10262 opaque_size.add(buf_planes.plane_info.frame_len);
10263                 } else {
10264 LOGE("raw frame calculation failed!");
10265 }
10266 }
10267 }
10268 }
10269
10270 if ((opaque_size.size() > 0) &&
10271 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10272 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10273 else
10274 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10275#endif
10276
Thierry Strudel04e026f2016-10-10 11:27:36 -070010277 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10278 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10279 size = 0;
10280 count = CAM_IR_MODE_MAX;
10281 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10282 for (size_t i = 0; i < count; i++) {
10283 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10284 gCamCapability[cameraId]->supported_ir_modes[i]);
10285 if (NAME_NOT_FOUND != val) {
10286 avail_ir_modes[size] = (int32_t)val;
10287 size++;
10288 }
10289 }
10290 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10291 avail_ir_modes, size);
10292 }
10293
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010294 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10295 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10296 size = 0;
10297 count = CAM_AEC_CONVERGENCE_MAX;
10298 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10299 for (size_t i = 0; i < count; i++) {
10300 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10301 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10302 if (NAME_NOT_FOUND != val) {
10303 available_instant_aec_modes[size] = (int32_t)val;
10304 size++;
10305 }
10306 }
10307 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10308 available_instant_aec_modes, size);
10309 }
10310
Thierry Strudel54dc9782017-02-15 12:12:10 -080010311 int32_t sharpness_range[] = {
10312 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10313 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10314 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10315
10316 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10317 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10318 size = 0;
10319 count = CAM_BINNING_CORRECTION_MODE_MAX;
10320 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10321 for (size_t i = 0; i < count; i++) {
10322 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10323 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10324 gCamCapability[cameraId]->supported_binning_modes[i]);
10325 if (NAME_NOT_FOUND != val) {
10326 avail_binning_modes[size] = (int32_t)val;
10327 size++;
10328 }
10329 }
10330 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10331 avail_binning_modes, size);
10332 }
10333
10334 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10335 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10336 size = 0;
10337 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10338 for (size_t i = 0; i < count; i++) {
10339 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10340 gCamCapability[cameraId]->supported_aec_modes[i]);
10341 if (NAME_NOT_FOUND != val)
10342 available_aec_modes[size++] = val;
10343 }
10344 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10345 available_aec_modes, size);
10346 }
10347
10348 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10349 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10350 size = 0;
10351 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10352 for (size_t i = 0; i < count; i++) {
10353 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10354 gCamCapability[cameraId]->supported_iso_modes[i]);
10355 if (NAME_NOT_FOUND != val)
10356 available_iso_modes[size++] = val;
10357 }
10358 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10359 available_iso_modes, size);
10360 }
10361
10362 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010363 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010364 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10365 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10366 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10367
10368 int32_t available_saturation_range[4];
10369 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10370 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10371 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10372 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10373 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10374 available_saturation_range, 4);
10375
10376 uint8_t is_hdr_values[2];
10377 is_hdr_values[0] = 0;
10378 is_hdr_values[1] = 1;
10379 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10380 is_hdr_values, 2);
10381
10382 float is_hdr_confidence_range[2];
10383 is_hdr_confidence_range[0] = 0.0;
10384 is_hdr_confidence_range[1] = 1.0;
10385 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10386 is_hdr_confidence_range, 2);
10387
Emilian Peev0a972ef2017-03-16 10:25:53 +000010388 size_t eepromLength = strnlen(
10389 reinterpret_cast<const char *>(
10390 gCamCapability[cameraId]->eeprom_version_info),
10391 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10392 if (0 < eepromLength) {
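        // Append Easel presence info (",E:Y" or ",E:N") to the EEPROM version string
        // when there is room for it.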
Zhijun Hea557c4c2017-03-16 18:37:53 -070010393 char easelInfo[] = ",E:N";
10394 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10395 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10396 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010397 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10398 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010399 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010400 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10401 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10402 }
10403
Thierry Strudel3d639192016-09-09 11:52:26 -070010404 gStaticMetadata[cameraId] = staticInfo.release();
10405 return rc;
10406}
10407
10408/*===========================================================================
10409 * FUNCTION : makeTable
10410 *
10411 * DESCRIPTION: make a table of sizes
10412 *
10413 * PARAMETERS :
10414 *
10415 *
10416 *==========================================================================*/
10417void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10418 size_t max_size, int32_t *sizeTable)
10419{
10420 size_t j = 0;
10421 if (size > max_size) {
10422 size = max_size;
10423 }
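    // Flatten each dimension into consecutive (width, height) entries of sizeTable.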
10424 for (size_t i = 0; i < size; i++) {
10425 sizeTable[j] = dimTable[i].width;
10426 sizeTable[j+1] = dimTable[i].height;
10427 j+=2;
10428 }
10429}
10430
10431/*===========================================================================
10432 * FUNCTION : makeFPSTable
10433 *
10434 * DESCRIPTION: make a table of fps ranges
10435 *
10436 * PARAMETERS :
10437 *
10438 *==========================================================================*/
10439void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10440 size_t max_size, int32_t *fpsRangesTable)
10441{
10442 size_t j = 0;
10443 if (size > max_size) {
10444 size = max_size;
10445 }
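    // Flatten each range into consecutive (min_fps, max_fps) int32 entries.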
10446 for (size_t i = 0; i < size; i++) {
10447 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10448 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10449 j+=2;
10450 }
10451}
10452
10453/*===========================================================================
10454 * FUNCTION : makeOverridesList
10455 *
10456 * DESCRIPTION: make a list of scene mode overrides
10457 *
10458 * PARAMETERS :
10459 *
10460 *
10461 *==========================================================================*/
10462void QCamera3HardwareInterface::makeOverridesList(
10463 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10464 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10465{
10466    /* The daemon gives a list of overrides for all scene modes.
10467      However, we should send the framework only the overrides for the
10468      scene modes it supports. */
10469 size_t j = 0;
10470 if (size > max_size) {
10471 size = max_size;
10472 }
10473 size_t focus_count = CAM_FOCUS_MODE_MAX;
10474 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10475 focus_count);
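    // Each override entry is a {AE mode, AWB mode, AF mode} triplet per supported scene mode.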
10476 for (size_t i = 0; i < size; i++) {
10477 bool supt = false;
10478 size_t index = supported_indexes[i];
10479 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10480 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10481 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10482 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10483 overridesTable[index].awb_mode);
10484 if (NAME_NOT_FOUND != val) {
10485 overridesList[j+1] = (uint8_t)val;
10486 }
10487 uint8_t focus_override = overridesTable[index].af_mode;
10488 for (size_t k = 0; k < focus_count; k++) {
10489 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10490 supt = true;
10491 break;
10492 }
10493 }
10494 if (supt) {
10495 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10496 focus_override);
10497 if (NAME_NOT_FOUND != val) {
10498 overridesList[j+2] = (uint8_t)val;
10499 }
10500 } else {
10501 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10502 }
10503 j+=3;
10504 }
10505}
10506
10507/*===========================================================================
10508 * FUNCTION : filterJpegSizes
10509 *
10510 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10511 *              the active array output can be downscaled to within downscale_factor
10512 *
10513 * PARAMETERS :
10514 *
10515 * RETURN : length of jpegSizes array
10516 *==========================================================================*/
10517
10518size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10519 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10520 uint8_t downscale_factor)
10521{
10522 if (0 == downscale_factor) {
10523 downscale_factor = 1;
10524 }
10525
10526 int32_t min_width = active_array_size.width / downscale_factor;
10527 int32_t min_height = active_array_size.height / downscale_factor;
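    // Keep only processed sizes (stored as flattened width/height pairs) that are at
    // least active_array / downscale_factor in both dimensions.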
10528 size_t jpegSizesCnt = 0;
10529 if (processedSizesCnt > maxCount) {
10530 processedSizesCnt = maxCount;
10531 }
10532 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10533 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10534 jpegSizes[jpegSizesCnt] = processedSizes[i];
10535 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10536 jpegSizesCnt += 2;
10537 }
10538 }
10539 return jpegSizesCnt;
10540}
10541
10542/*===========================================================================
10543 * FUNCTION : computeNoiseModelEntryS
10544 *
10545 * DESCRIPTION: function to map a given sensitivity to the S noise
10546 * model parameters in the DNG noise model.
10547 *
10548 * PARAMETERS : sens : the sensor sensitivity
10549 *
10550 * RETURN     : S (sensor amplification) noise
10551 *
10552 *==========================================================================*/
10553double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
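    // S is modeled as a linear function of sensitivity,
    // S(sens) = gradient_S * sens + offset_S, clamped to be non-negative.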
10554 double s = gCamCapability[mCameraId]->gradient_S * sens +
10555 gCamCapability[mCameraId]->offset_S;
10556 return ((s < 0.0) ? 0.0 : s);
10557}
10558
10559/*===========================================================================
10560 * FUNCTION : computeNoiseModelEntryO
10561 *
10562 * DESCRIPTION: function to map a given sensitivity to the O noise
10563 * model parameters in the DNG noise model.
10564 *
10565 * PARAMETERS : sens : the sensor sensitivity
10566 *
10567 * RETURN     : O (sensor readout) noise
10568 *
10569 *==========================================================================*/
10570double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
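    // O grows with the square of sensitivity and of the digital gain applied beyond the
    // analog range: O(sens) = gradient_O * sens^2 + offset_O * digital_gain^2, where
    // digital_gain = max(1, sens / max_analog_sensitivity); clamped to be non-negative.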
10571 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10572 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10573 1.0 : (1.0 * sens / max_analog_sens);
10574 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10575 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10576 return ((o < 0.0) ? 0.0 : o);
10577}
10578
10579/*===========================================================================
10580 * FUNCTION : getSensorSensitivity
10581 *
10582 * DESCRIPTION: convert iso_mode to an integer value
10583 *
10584 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10585 *
10586 * RETURN     : sensitivity supported by sensor
10587 *
10588 *==========================================================================*/
10589int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10590{
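    // Map discrete CAM_ISO_MODE_* values to numeric ISO sensitivities; modes without a
    // fixed numeric value return -1.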
10591 int32_t sensitivity;
10592
10593 switch (iso_mode) {
10594 case CAM_ISO_MODE_100:
10595 sensitivity = 100;
10596 break;
10597 case CAM_ISO_MODE_200:
10598 sensitivity = 200;
10599 break;
10600 case CAM_ISO_MODE_400:
10601 sensitivity = 400;
10602 break;
10603 case CAM_ISO_MODE_800:
10604 sensitivity = 800;
10605 break;
10606 case CAM_ISO_MODE_1600:
10607 sensitivity = 1600;
10608 break;
10609 default:
10610 sensitivity = -1;
10611 break;
10612 }
10613 return sensitivity;
10614}
10615
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010616int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010617 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010618 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10619 // to connect to Easel.
10620        bool doNotPowerOnEasel =
10621                property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10622
10623        if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010624 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10625 return OK;
10626 }
10627
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010628 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010629 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010630 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010631 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010632 return res;
10633 }
10634
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010635 EaselManagerClientOpened = true;
10636
10637 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010638 if (res != OK) {
10639 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10640 }
10641
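        // HDR+ processing is opt-in via persist.camera.hdrplus.enable; when it is not
        // set, Easel is treated as bypass only (see gEaselBypassOnly).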
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010642 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010643 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010644
10645 // Expose enableZsl key only when HDR+ mode is enabled.
10646 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010647 }
10648
10649 return OK;
10650}
10651
Thierry Strudel3d639192016-09-09 11:52:26 -070010652/*===========================================================================
10653 * FUNCTION : getCamInfo
10654 *
10655 * DESCRIPTION: query camera capabilities
10656 *
10657 * PARAMETERS :
10658 * @cameraId : camera Id
10659 * @info : camera info struct to be filled in with camera capabilities
10660 *
10661 * RETURN : int type of status
10662 * NO_ERROR -- success
10663 *              non-zero failure code
10664 *==========================================================================*/
10665int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10666 struct camera_info *info)
10667{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010668 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010669 int rc = 0;
10670
10671 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010672
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010673 {
10674 Mutex::Autolock l(gHdrPlusClientLock);
10675 rc = initHdrPlusClientLocked();
10676 if (rc != OK) {
10677 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10678 pthread_mutex_unlock(&gCamLock);
10679 return rc;
10680 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010681 }
10682
Thierry Strudel3d639192016-09-09 11:52:26 -070010683 if (NULL == gCamCapability[cameraId]) {
10684 rc = initCapabilities(cameraId);
10685 if (rc < 0) {
10686 pthread_mutex_unlock(&gCamLock);
10687 return rc;
10688 }
10689 }
10690
10691 if (NULL == gStaticMetadata[cameraId]) {
10692 rc = initStaticMetadata(cameraId);
10693 if (rc < 0) {
10694 pthread_mutex_unlock(&gCamLock);
10695 return rc;
10696 }
10697 }
10698
10699 switch(gCamCapability[cameraId]->position) {
10700 case CAM_POSITION_BACK:
10701 case CAM_POSITION_BACK_AUX:
10702 info->facing = CAMERA_FACING_BACK;
10703 break;
10704
10705 case CAM_POSITION_FRONT:
10706 case CAM_POSITION_FRONT_AUX:
10707 info->facing = CAMERA_FACING_FRONT;
10708 break;
10709
10710 default:
10711 LOGE("Unknown position type %d for camera id:%d",
10712 gCamCapability[cameraId]->position, cameraId);
10713 rc = -1;
10714 break;
10715 }
10716
10717
10718 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010719#ifndef USE_HAL_3_3
10720 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10721#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010722 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010723#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010724 info->static_camera_characteristics = gStaticMetadata[cameraId];
10725
10726 //For now assume both cameras can operate independently.
10727 info->conflicting_devices = NULL;
10728 info->conflicting_devices_length = 0;
10729
10730 //resource cost is 100 * MIN(1.0, m/M),
10731 //where m is throughput requirement with maximum stream configuration
10732 //and M is CPP maximum throughput.
10733 float max_fps = 0.0;
10734 for (uint32_t i = 0;
10735 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10736 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10737 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10738 }
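    // Approximate m as MAX_PROCESSED_STREAMS full active-array streams running at the
    // highest advertised fps.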
10739 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10740 gCamCapability[cameraId]->active_array_size.width *
10741 gCamCapability[cameraId]->active_array_size.height * max_fps /
10742 gCamCapability[cameraId]->max_pixel_bandwidth;
10743 info->resource_cost = 100 * MIN(1.0, ratio);
10744 LOGI("camera %d resource cost is %d", cameraId,
10745 info->resource_cost);
10746
10747 pthread_mutex_unlock(&gCamLock);
10748 return rc;
10749}
10750
10751/*===========================================================================
10752 * FUNCTION : translateCapabilityToMetadata
10753 *
10754 * DESCRIPTION: translate the capability into camera_metadata_t
10755 *
10756 * PARAMETERS : type of the request
10757 *
10758 *
10759 * RETURN : success: camera_metadata_t*
10760 * failure: NULL
10761 *
10762 *==========================================================================*/
10763camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10764{
10765 if (mDefaultMetadata[type] != NULL) {
10766 return mDefaultMetadata[type];
10767 }
10768 //first time we are handling this request
10769 //fill up the metadata structure using the wrapper class
10770 CameraMetadata settings;
10771 //translate from cam_capability_t to camera_metadata_tag_t
10772 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10773 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10774 int32_t defaultRequestID = 0;
10775 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10776
10777 /* OIS disable */
10778 char ois_prop[PROPERTY_VALUE_MAX];
10779 memset(ois_prop, 0, sizeof(ois_prop));
10780 property_get("persist.camera.ois.disable", ois_prop, "0");
10781 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10782
10783 /* Force video to use OIS */
10784 char videoOisProp[PROPERTY_VALUE_MAX];
10785 memset(videoOisProp, 0, sizeof(videoOisProp));
10786 property_get("persist.camera.ois.video", videoOisProp, "1");
10787 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010788
10789 // Hybrid AE enable/disable
10790 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10791 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10792 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10793 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10794
Thierry Strudel3d639192016-09-09 11:52:26 -070010795 uint8_t controlIntent = 0;
10796 uint8_t focusMode;
10797 uint8_t vsMode;
10798 uint8_t optStabMode;
10799 uint8_t cacMode;
10800 uint8_t edge_mode;
10801 uint8_t noise_red_mode;
10802 uint8_t tonemap_mode;
10803 bool highQualityModeEntryAvailable = FALSE;
10804 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010805 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010806 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10807 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010808 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010809 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010810 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010811
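    // Per-template defaults: capture intent, AF mode, OIS, CAC, edge, noise reduction
    // and tonemap modes are selected per request template below.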
Thierry Strudel3d639192016-09-09 11:52:26 -070010812 switch (type) {
10813 case CAMERA3_TEMPLATE_PREVIEW:
10814 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10815 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10816 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10817 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10818 edge_mode = ANDROID_EDGE_MODE_FAST;
10819 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10820 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10821 break;
10822 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10823 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10824 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10825 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10826 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10827 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10828 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10829 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10830 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10831 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10832 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10833 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10834 highQualityModeEntryAvailable = TRUE;
10835 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10836 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10837 fastModeEntryAvailable = TRUE;
10838 }
10839 }
10840 if (highQualityModeEntryAvailable) {
10841 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10842 } else if (fastModeEntryAvailable) {
10843 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10844 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010845 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10846 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10847 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010848 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010849 break;
10850 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10851 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10852 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10853 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010854 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10855 edge_mode = ANDROID_EDGE_MODE_FAST;
10856 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10857 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10858 if (forceVideoOis)
10859 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10860 break;
10861 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10862 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10863 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10864 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010865 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10866 edge_mode = ANDROID_EDGE_MODE_FAST;
10867 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10868 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10869 if (forceVideoOis)
10870 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10871 break;
10872 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10873 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10874 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10875 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10876 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10877 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10878 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10879 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10880 break;
10881 case CAMERA3_TEMPLATE_MANUAL:
10882 edge_mode = ANDROID_EDGE_MODE_FAST;
10883 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10884 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10885 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10886 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10887 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10888 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10889 break;
10890 default:
10891 edge_mode = ANDROID_EDGE_MODE_FAST;
10892 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10893 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10894 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10895 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10896 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10897 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10898 break;
10899 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010900    // Set CAC to OFF if the underlying device doesn't support it
10901 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10902 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10903 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010904 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10905 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10906 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10907 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10908 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10909 }
10910 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010911 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010912 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010913
10914 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10915 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10916 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10917 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10918 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10919 || ois_disable)
10920 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10921 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010922 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010923
10924 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10925 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10926
10927 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10928 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10929
10930 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10931 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10932
10933 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10934 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10935
10936 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10937 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10938
10939 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10940 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10941
10942 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10943 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10944
10945 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10946 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10947
10948 /*flash*/
10949 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10950 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10951
10952 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10953 settings.update(ANDROID_FLASH_FIRING_POWER,
10954 &flashFiringLevel, 1);
10955
10956 /* lens */
10957 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10958 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10959
10960 if (gCamCapability[mCameraId]->filter_densities_count) {
10961 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10962 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10963 gCamCapability[mCameraId]->filter_densities_count);
10964 }
10965
10966 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10967 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10968
Thierry Strudel3d639192016-09-09 11:52:26 -070010969 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10970 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10971
10972 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10973 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10974
10975 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10976 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10977
10978 /* face detection (default to OFF) */
10979 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10980 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10981
Thierry Strudel54dc9782017-02-15 12:12:10 -080010982 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10983 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010984
10985 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10986 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10987
10988 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10989 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10990
Thierry Strudel3d639192016-09-09 11:52:26 -070010991
10992 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10993 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10994
10995 /* Exposure time(Update the Min Exposure Time)*/
10996 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10997 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10998
10999 /* frame duration */
11000 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11001 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11002
11003 /* sensitivity */
11004 static const int32_t default_sensitivity = 100;
11005 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011006#ifndef USE_HAL_3_3
11007 static const int32_t default_isp_sensitivity =
11008 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11009 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11010#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011011
11012 /*edge mode*/
11013 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11014
11015 /*noise reduction mode*/
11016 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11017
11018 /*color correction mode*/
11019 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11020 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11021
11022 /*transform matrix mode*/
11023 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11024
11025 int32_t scaler_crop_region[4];
11026 scaler_crop_region[0] = 0;
11027 scaler_crop_region[1] = 0;
11028 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11029 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11030 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11031
11032 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11033 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11034
11035 /*focus distance*/
11036 float focus_distance = 0.0;
11037 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11038
11039 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011040 /* Restrict template max_fps to 30 */
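    /* For preview, still and ZSL templates pick the widest supported range; for the
       other (video) templates pick the highest fixed (min == max) range, skipping
       ranges whose max_fps exceeds TEMPLATE_MAX_PREVIEW_FPS */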
Thierry Strudel3d639192016-09-09 11:52:26 -070011041 float max_range = 0.0;
11042 float max_fixed_fps = 0.0;
11043 int32_t fps_range[2] = {0, 0};
11044 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11045 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011046 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11047 TEMPLATE_MAX_PREVIEW_FPS) {
11048 continue;
11049 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011050 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11051 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11052 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11053 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11054 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11055 if (range > max_range) {
11056 fps_range[0] =
11057 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11058 fps_range[1] =
11059 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11060 max_range = range;
11061 }
11062 } else {
11063 if (range < 0.01 && max_fixed_fps <
11064 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11065 fps_range[0] =
11066 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11067 fps_range[1] =
11068 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11069 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11070 }
11071 }
11072 }
11073 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11074
11075 /*precapture trigger*/
11076 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11077 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11078
11079 /*af trigger*/
11080 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11081 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11082
11083 /* ae & af regions */
11084 int32_t active_region[] = {
11085 gCamCapability[mCameraId]->active_array_size.left,
11086 gCamCapability[mCameraId]->active_array_size.top,
11087 gCamCapability[mCameraId]->active_array_size.left +
11088 gCamCapability[mCameraId]->active_array_size.width,
11089 gCamCapability[mCameraId]->active_array_size.top +
11090 gCamCapability[mCameraId]->active_array_size.height,
11091 0};
11092 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11093 sizeof(active_region) / sizeof(active_region[0]));
11094 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11095 sizeof(active_region) / sizeof(active_region[0]));
11096
11097 /* black level lock */
11098 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11099 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11100
Thierry Strudel3d639192016-09-09 11:52:26 -070011101 //special defaults for manual template
11102 if (type == CAMERA3_TEMPLATE_MANUAL) {
11103 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11104 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11105
11106 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11107 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11108
11109 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11110 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11111
11112 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11113 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11114
11115 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11116 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11117
11118 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11119 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11120 }
11121
11122
11123 /* TNR
11124     * This is where we decide for which templates TNR will be enabled.
11125     * TNR is turned on if either the preview or the video stream requires it.
11126     * This is not to be confused with per-stream linking; that decision is
11127     * still made on a per-session basis and is handled as part of stream configuration.
11128 */
11129 uint8_t tnr_enable = 0;
11130
11131 if (m_bTnrPreview || m_bTnrVideo) {
11132
11133 switch (type) {
11134 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11135 tnr_enable = 1;
11136 break;
11137
11138 default:
11139 tnr_enable = 0;
11140 break;
11141 }
11142
11143 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11144 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11145 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11146
11147 LOGD("TNR:%d with process plate %d for template:%d",
11148 tnr_enable, tnr_process_type, type);
11149 }
11150
11151 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011152 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011153 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11154
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011155 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011156 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11157
Shuzhen Wang920ea402017-05-03 08:49:39 -070011158 uint8_t related_camera_id = mCameraId;
11159 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011160
11161 /* CDS default */
11162 char prop[PROPERTY_VALUE_MAX];
11163 memset(prop, 0, sizeof(prop));
11164 property_get("persist.camera.CDS", prop, "Auto");
11165 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11166 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11167 if (CAM_CDS_MODE_MAX == cds_mode) {
11168 cds_mode = CAM_CDS_MODE_AUTO;
11169 }
11170
11171    /* Disable CDS in templates that have TNR enabled */
11172 if (tnr_enable)
11173 cds_mode = CAM_CDS_MODE_OFF;
11174
11175 int32_t mode = cds_mode;
11176 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011177
Thierry Strudel269c81a2016-10-12 12:13:59 -070011178 /* Manual Convergence AEC Speed is disabled by default*/
11179 float default_aec_speed = 0;
11180 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11181
11182 /* Manual Convergence AWB Speed is disabled by default*/
11183 float default_awb_speed = 0;
11184 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11185
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011186 // Set instant AEC to normal convergence by default
11187 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11188 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11189
Shuzhen Wang19463d72016-03-08 11:09:52 -080011190 /* hybrid ae */
11191 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11192
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011193 if (gExposeEnableZslKey) {
11194 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11195 }
11196
Thierry Strudel3d639192016-09-09 11:52:26 -070011197 mDefaultMetadata[type] = settings.release();
11198
11199 return mDefaultMetadata[type];
11200}
11201
11202/*===========================================================================
11203 * FUNCTION : setFrameParameters
11204 *
11205 * DESCRIPTION: set parameters per frame as requested in the metadata from
11206 * framework
11207 *
11208 * PARAMETERS :
11209 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011210 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011211 * @blob_request: Whether this request is a blob request or not
11212 *
11213 * RETURN : success: NO_ERROR
11214 * failure:
11215 *==========================================================================*/
11216int QCamera3HardwareInterface::setFrameParameters(
11217 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011218 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011219 int blob_request,
11220 uint32_t snapshotStreamId)
11221{
11222 /*translate from camera_metadata_t type to parm_type_t*/
11223 int rc = 0;
11224 int32_t hal_version = CAM_HAL_V3;
11225
11226 clear_metadata_buffer(mParameters);
11227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11228 LOGE("Failed to set hal version in the parameters");
11229 return BAD_VALUE;
11230 }
11231
11232 /*we need to update the frame number in the parameters*/
11233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11234 request->frame_number)) {
11235 LOGE("Failed to set the frame number in the parameters");
11236 return BAD_VALUE;
11237 }
11238
11239 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011241 LOGE("Failed to set stream type mask in the parameters");
11242 return BAD_VALUE;
11243 }
11244
11245 if (mUpdateDebugLevel) {
11246 uint32_t dummyDebugLevel = 0;
11247        /* The value of dummyDebugLevel is irrelevant. On
11248         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
11249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11250 dummyDebugLevel)) {
11251 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11252 return BAD_VALUE;
11253 }
11254 mUpdateDebugLevel = false;
11255 }
11256
11257 if(request->settings != NULL){
11258 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11259 if (blob_request)
11260 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11261 }
11262
11263 return rc;
11264}
11265
11266/*===========================================================================
11267 * FUNCTION : setReprocParameters
11268 *
11269 * DESCRIPTION: Translate framework metadata into the HAL metadata structure and
11270 *              return it.
11271 *
11272 * PARAMETERS :
11273 * @request : request that needs to be serviced
11274 *
11275 * RETURN : success: NO_ERROR
11276 * failure:
11277 *==========================================================================*/
11278int32_t QCamera3HardwareInterface::setReprocParameters(
11279 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11280 uint32_t snapshotStreamId)
11281{
11282 /*translate from camera_metadata_t type to parm_type_t*/
11283 int rc = 0;
11284
11285 if (NULL == request->settings){
11286 LOGE("Reprocess settings cannot be NULL");
11287 return BAD_VALUE;
11288 }
11289
11290 if (NULL == reprocParam) {
11291 LOGE("Invalid reprocessing metadata buffer");
11292 return BAD_VALUE;
11293 }
11294 clear_metadata_buffer(reprocParam);
11295
11296 /*we need to update the frame number in the parameters*/
11297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11298 request->frame_number)) {
11299 LOGE("Failed to set the frame number in the parameters");
11300 return BAD_VALUE;
11301 }
11302
11303 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11304 if (rc < 0) {
11305 LOGE("Failed to translate reproc request");
11306 return rc;
11307 }
11308
11309 CameraMetadata frame_settings;
11310 frame_settings = request->settings;
11311 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11312 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11313 int32_t *crop_count =
11314 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11315 int32_t *crop_data =
11316 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11317 int32_t *roi_map =
11318 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11319 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11320 cam_crop_data_t crop_meta;
11321 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11322 crop_meta.num_of_streams = 1;
11323 crop_meta.crop_info[0].crop.left = crop_data[0];
11324 crop_meta.crop_info[0].crop.top = crop_data[1];
11325 crop_meta.crop_info[0].crop.width = crop_data[2];
11326 crop_meta.crop_info[0].crop.height = crop_data[3];
11327
11328 crop_meta.crop_info[0].roi_map.left =
11329 roi_map[0];
11330 crop_meta.crop_info[0].roi_map.top =
11331 roi_map[1];
11332 crop_meta.crop_info[0].roi_map.width =
11333 roi_map[2];
11334 crop_meta.crop_info[0].roi_map.height =
11335 roi_map[3];
11336
11337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11338 rc = BAD_VALUE;
11339 }
11340 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11341 request->input_buffer->stream,
11342 crop_meta.crop_info[0].crop.left,
11343 crop_meta.crop_info[0].crop.top,
11344 crop_meta.crop_info[0].crop.width,
11345 crop_meta.crop_info[0].crop.height);
11346 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11347 request->input_buffer->stream,
11348 crop_meta.crop_info[0].roi_map.left,
11349 crop_meta.crop_info[0].roi_map.top,
11350 crop_meta.crop_info[0].roi_map.width,
11351 crop_meta.crop_info[0].roi_map.height);
11352 } else {
11353 LOGE("Invalid reprocess crop count %d!", *crop_count);
11354 }
11355 } else {
11356 LOGE("No crop data from matching output stream");
11357 }
11358
11359    /* These settings are not needed for regular requests, so handle them specially for
11360       reprocess requests; they carry information needed for EXIF tags */
11361 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11362 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11363 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11364 if (NAME_NOT_FOUND != val) {
11365 uint32_t flashMode = (uint32_t)val;
11366 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11367 rc = BAD_VALUE;
11368 }
11369 } else {
11370 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11371 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11372 }
11373 } else {
11374 LOGH("No flash mode in reprocess settings");
11375 }
11376
11377 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11378 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11380 rc = BAD_VALUE;
11381 }
11382 } else {
11383 LOGH("No flash state in reprocess settings");
11384 }
11385
11386 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11387 uint8_t *reprocessFlags =
11388 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11390 *reprocessFlags)) {
11391 rc = BAD_VALUE;
11392 }
11393 }
11394
Thierry Strudel54dc9782017-02-15 12:12:10 -080011395 // Add exif debug data to internal metadata
11396 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11397 mm_jpeg_debug_exif_params_t *debug_params =
11398 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11399 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11400 // AE
11401 if (debug_params->ae_debug_params_valid == TRUE) {
11402 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11403 debug_params->ae_debug_params);
11404 }
11405 // AWB
11406 if (debug_params->awb_debug_params_valid == TRUE) {
11407 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11408 debug_params->awb_debug_params);
11409 }
11410 // AF
11411 if (debug_params->af_debug_params_valid == TRUE) {
11412 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11413 debug_params->af_debug_params);
11414 }
11415 // ASD
11416 if (debug_params->asd_debug_params_valid == TRUE) {
11417 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11418 debug_params->asd_debug_params);
11419 }
11420 // Stats
11421 if (debug_params->stats_debug_params_valid == TRUE) {
11422 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11423 debug_params->stats_debug_params);
11424 }
11425 // BE Stats
11426 if (debug_params->bestats_debug_params_valid == TRUE) {
11427 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11428 debug_params->bestats_debug_params);
11429 }
11430 // BHIST
11431 if (debug_params->bhist_debug_params_valid == TRUE) {
11432 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11433 debug_params->bhist_debug_params);
11434 }
11435 // 3A Tuning
11436 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11437 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11438 debug_params->q3a_tuning_debug_params);
11439 }
11440 }
11441
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011442 // Add metadata which reprocess needs
11443 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11444 cam_reprocess_info_t *repro_info =
11445 (cam_reprocess_info_t *)frame_settings.find
11446 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011447 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011448 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011449 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011450 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011451 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011452 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011453 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011454 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011455 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011456 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011457 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011458 repro_info->pipeline_flip);
11459 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11460 repro_info->af_roi);
11461 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11462 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011463        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11464           CAM_INTF_PARM_ROTATION metadata has already been added in
11465           translateToHalMetadata and the HAL needs to keep this new rotation
11466           metadata. Otherwise, the old rotation info saved in the vendor tag
11467           would be used */
11468 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11469 CAM_INTF_PARM_ROTATION, reprocParam) {
11470 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11471 } else {
11472 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011473 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011474 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011475 }
11476
11477    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11478       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11479       roi.width and roi.height will be the final JPEG size.
11480       For now, the HAL only checks this for reprocess requests */
11481 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11482 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11483 uint8_t *enable =
11484 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11485 if (*enable == TRUE) {
11486 int32_t *crop_data =
11487 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11488 cam_stream_crop_info_t crop_meta;
11489 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11490 crop_meta.stream_id = 0;
11491 crop_meta.crop.left = crop_data[0];
11492 crop_meta.crop.top = crop_data[1];
11493 crop_meta.crop.width = crop_data[2];
11494 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011495 // The JPEG crop roi should match cpp output size
11496 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11497 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11498 crop_meta.roi_map.left = 0;
11499 crop_meta.roi_map.top = 0;
11500 crop_meta.roi_map.width = cpp_crop->crop.width;
11501 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011502 }
11503 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11504 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011505 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011506 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011507 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11508 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011509 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011510 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11511
11512 // Add JPEG scale information
11513 cam_dimension_t scale_dim;
11514 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11515 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11516 int32_t *roi =
11517 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11518 scale_dim.width = roi[2];
11519 scale_dim.height = roi[3];
11520 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11521 scale_dim);
11522 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11523 scale_dim.width, scale_dim.height, mCameraId);
11524 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011525 }
11526 }
11527
11528 return rc;
11529}
11530
11531/*===========================================================================
11532 * FUNCTION : saveRequestSettings
11533 *
11534 * DESCRIPTION: Add any settings that might have changed to the request settings
11535 * and save the settings to be applied on the frame
11536 *
11537 * PARAMETERS :
11538 * @jpegMetadata : the extracted and/or modified jpeg metadata
11539 * @request : request with initial settings
11540 *
11541 * RETURN :
11542 * camera_metadata_t* : pointer to the saved request settings
11543 *==========================================================================*/
11544camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11545 const CameraMetadata &jpegMetadata,
11546 camera3_capture_request_t *request)
11547{
11548 camera_metadata_t *resultMetadata;
11549 CameraMetadata camMetadata;
11550 camMetadata = request->settings;
11551
11552 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11553 int32_t thumbnail_size[2];
11554 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11555 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11556 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11557 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11558 }
11559
11560 if (request->input_buffer != NULL) {
11561 uint8_t reprocessFlags = 1;
11562 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11563 (uint8_t*)&reprocessFlags,
11564 sizeof(reprocessFlags));
11565 }
11566
11567 resultMetadata = camMetadata.release();
11568 return resultMetadata;
11569}
11570
11571/*===========================================================================
11572 * FUNCTION : setHalFpsRange
11573 *
11574 * DESCRIPTION: set FPS range parameter
11575 *
11576 *
11577 * PARAMETERS :
11578 * @settings : Metadata from framework
11579 * @hal_metadata: Metadata buffer
11580 *
11581 *
11582 * RETURN : success: NO_ERROR
11583 * failure:
11584 *==========================================================================*/
11585int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11586 metadata_buffer_t *hal_metadata)
11587{
11588 int32_t rc = NO_ERROR;
11589 cam_fps_range_t fps_range;
11590 fps_range.min_fps = (float)
11591 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11592 fps_range.max_fps = (float)
11593 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11594 fps_range.video_min_fps = fps_range.min_fps;
11595 fps_range.video_max_fps = fps_range.max_fps;
11596
11597 LOGD("aeTargetFpsRange fps: [%f %f]",
11598 fps_range.min_fps, fps_range.max_fps);
11599 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11600 * follows:
11601 * ---------------------------------------------------------------|
11602 * Video stream is absent in configure_streams |
11603 * (Camcorder preview before the first video record |
11604 * ---------------------------------------------------------------|
11605 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11606 * | | | vid_min/max_fps|
11607 * ---------------------------------------------------------------|
11608 * NO | [ 30, 240] | 240 | [240, 240] |
11609 * |-------------|-------------|----------------|
11610 * | [240, 240] | 240 | [240, 240] |
11611 * ---------------------------------------------------------------|
11612 * Video stream is present in configure_streams |
11613 * ---------------------------------------------------------------|
11614 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11615 * | | | vid_min/max_fps|
11616 * ---------------------------------------------------------------|
11617 * NO | [ 30, 240] | 240 | [240, 240] |
11618 * (camcorder prev |-------------|-------------|----------------|
11619 * after video rec | [240, 240] | 240 | [240, 240] |
11620 * is stopped) | | | |
11621 * ---------------------------------------------------------------|
11622 * YES | [ 30, 240] | 240 | [240, 240] |
11623 * |-------------|-------------|----------------|
11624 * | [240, 240] | 240 | [240, 240] |
11625 * ---------------------------------------------------------------|
11626 * When Video stream is absent in configure_streams,
11627 * preview fps = sensor_fps / batchsize
11628 * Eg: for 240fps at batchSize 4, preview = 60fps
11629 * for 120fps at batchSize 4, preview = 30fps
11630 *
11631 * When video stream is present in configure_streams, preview fps is as per
11632 * the ratio of preview buffers to video buffers requested in process
11633 * capture request
11634 */
11635 mBatchSize = 0;
11636 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11637 fps_range.min_fps = fps_range.video_max_fps;
11638 fps_range.video_min_fps = fps_range.video_max_fps;
11639 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11640 fps_range.max_fps);
11641 if (NAME_NOT_FOUND != val) {
11642 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11644 return BAD_VALUE;
11645 }
11646
11647 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11648 /* If batchmode is currently in progress and the fps changes,
11649 * set the flag to restart the sensor */
11650 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11651 (mHFRVideoFps != fps_range.max_fps)) {
11652 mNeedSensorRestart = true;
11653 }
11654 mHFRVideoFps = fps_range.max_fps;
11655 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11656 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11657 mBatchSize = MAX_HFR_BATCH_SIZE;
11658 }
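                /* Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30 and
                 * MAX_HFR_BATCH_SIZE is 8): an aeTargetFpsRange of [240, 240]
                 * yields mBatchSize = 240 / 30 = 8, while [120, 120] yields 4. */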
11659 }
11660 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11661
11662 }
11663 } else {
11664 /* HFR mode is session param in backend/ISP. This should be reset when
11665 * in non-HFR mode */
11666 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11667 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11668 return BAD_VALUE;
11669 }
11670 }
11671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11672 return BAD_VALUE;
11673 }
11674 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11675 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11676 return rc;
11677}
11678
11679/*===========================================================================
11680 * FUNCTION : translateToHalMetadata
11681 *
11682 * DESCRIPTION: Read settings from the framework camera_metadata_t and translate them into the HAL metadata buffer
11683 *
11684 *
11685 * PARAMETERS :
11686 * @request : request sent from framework
11687 * @hal_metadata : HAL metadata buffer to populate
11688 * @snapshotStreamId: stream ID of the snapshot stream
11689 * RETURN : success: NO_ERROR
11690 *          failure: BAD_VALUE
11691 *==========================================================================*/
11692int QCamera3HardwareInterface::translateToHalMetadata
11693 (const camera3_capture_request_t *request,
11694 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011695 uint32_t snapshotStreamId) {
11696 if (request == nullptr || hal_metadata == nullptr) {
11697 return BAD_VALUE;
11698 }
11699
11700 int64_t minFrameDuration = getMinFrameDuration(request);
11701
11702 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11703 minFrameDuration);
11704}
11705
11706int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11707 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11708 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11709
Thierry Strudel3d639192016-09-09 11:52:26 -070011710 int rc = 0;
11711 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011712 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011713
11714 /* Do not change the order of the following list unless you know what you are
11715 * doing.
11716 * The order is laid out in such a way that parameters in the front of the table
11717 * may be used to override the parameters later in the table. Examples are:
11718 * 1. META_MODE should precede AEC/AWB/AF MODE
11719 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11720 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11721 * 4. Any mode should precede its corresponding settings
11722 */
11723 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11724 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11726 rc = BAD_VALUE;
11727 }
11728 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11729 if (rc != NO_ERROR) {
11730 LOGE("extractSceneMode failed");
11731 }
11732 }
11733
11734 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11735 uint8_t fwk_aeMode =
11736 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11737 uint8_t aeMode;
11738 int32_t redeye;
11739
11740 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11741 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011742 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11743 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011744 } else {
11745 aeMode = CAM_AE_MODE_ON;
11746 }
11747 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11748 redeye = 1;
11749 } else {
11750 redeye = 0;
11751 }
11752
11753 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11754 fwk_aeMode);
11755 if (NAME_NOT_FOUND != val) {
11756 int32_t flashMode = (int32_t)val;
11757 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11758 }
11759
11760 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11762 rc = BAD_VALUE;
11763 }
11764 }
11765
11766 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11767 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11768 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11769 fwk_whiteLevel);
11770 if (NAME_NOT_FOUND != val) {
11771 uint8_t whiteLevel = (uint8_t)val;
11772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11773 rc = BAD_VALUE;
11774 }
11775 }
11776 }
11777
11778 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11779 uint8_t fwk_cacMode =
11780 frame_settings.find(
11781 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11782 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11783 fwk_cacMode);
11784 if (NAME_NOT_FOUND != val) {
11785 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11786 bool entryAvailable = FALSE;
11787            // Check whether the framework-requested CAC mode is supported by the device
11788 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11789 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11790 entryAvailable = TRUE;
11791 break;
11792 }
11793 }
11794 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11795            // If no match is found, fall back to a device-supported mode instead of the framework mode, i.e.:
11796 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11797 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
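            // Illustrative fallback: a framework request for HIGH_QUALITY on a device
            // that only advertises FAST is mapped to FAST, and a request for FAST that
            // is not advertised is mapped to OFF (see the selection logic below).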
11798 if (entryAvailable == FALSE) {
11799 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11800 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11801 } else {
11802 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11803                    // HIGH_QUALITY is not supported, so fall back to FAST; the spec says
11804                    // the underlying device implementation can be the same for both modes.
11805 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11806 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11807                    // FAST is not supported, so neither HIGH nor FAST can be used; choose OFF
11808                    // to avoid the fps drop that high quality would incur
11809 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11810 } else {
11811 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11812 }
11813 }
11814 }
11815 LOGD("Final cacMode is %d", cacMode);
11816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11817 rc = BAD_VALUE;
11818 }
11819 } else {
11820 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11821 }
11822 }
11823
Thierry Strudel2896d122017-02-23 19:18:03 -080011824 char af_value[PROPERTY_VALUE_MAX];
11825 property_get("persist.camera.af.infinity", af_value, "0");
11826
Jason Lee84ae9972017-02-24 13:24:24 -080011827 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011828 if (atoi(af_value) == 0) {
11829 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011830 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011831 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11832 fwk_focusMode);
11833 if (NAME_NOT_FOUND != val) {
11834 uint8_t focusMode = (uint8_t)val;
11835 LOGD("set focus mode %d", focusMode);
11836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11837 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11838 rc = BAD_VALUE;
11839 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011840 }
11841 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011842 } else {
11843 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11844 LOGE("Focus forced to infinity %d", focusMode);
11845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11846 rc = BAD_VALUE;
11847 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011848 }
11849
Jason Lee84ae9972017-02-24 13:24:24 -080011850 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11851 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011852 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11854 focalDistance)) {
11855 rc = BAD_VALUE;
11856 }
11857 }
11858
11859 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11860 uint8_t fwk_antibandingMode =
11861 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11862 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11863 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11864 if (NAME_NOT_FOUND != val) {
11865 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011866 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11867 if (m60HzZone) {
11868 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11869 } else {
11870 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11871 }
11872 }
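            // When the framework requests AUTO antibanding, the HAL narrows it above to a
            // 50 Hz or 60 Hz auto mode based on m60HzZone, which is determined elsewhere
            // in the HAL (illustratively, from the device's region).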
Thierry Strudel3d639192016-09-09 11:52:26 -070011873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11874 hal_antibandingMode)) {
11875 rc = BAD_VALUE;
11876 }
11877 }
11878 }
11879
11880 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11881 int32_t expCompensation = frame_settings.find(
11882 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11883 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11884 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11885 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11886 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011887 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11889 expCompensation)) {
11890 rc = BAD_VALUE;
11891 }
11892 }
11893
11894 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11895 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11897 rc = BAD_VALUE;
11898 }
11899 }
11900 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11901 rc = setHalFpsRange(frame_settings, hal_metadata);
11902 if (rc != NO_ERROR) {
11903 LOGE("setHalFpsRange failed");
11904 }
11905 }
11906
11907 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11908 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11910 rc = BAD_VALUE;
11911 }
11912 }
11913
11914 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11915 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11916 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11917 fwk_effectMode);
11918 if (NAME_NOT_FOUND != val) {
11919 uint8_t effectMode = (uint8_t)val;
11920 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11921 rc = BAD_VALUE;
11922 }
11923 }
11924 }
11925
11926 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11927 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11929 colorCorrectMode)) {
11930 rc = BAD_VALUE;
11931 }
11932 }
11933
11934 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11935 cam_color_correct_gains_t colorCorrectGains;
11936 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11937 colorCorrectGains.gains[i] =
11938 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11939 }
11940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11941 colorCorrectGains)) {
11942 rc = BAD_VALUE;
11943 }
11944 }
11945
11946 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11947 cam_color_correct_matrix_t colorCorrectTransform;
11948 cam_rational_type_t transform_elem;
11949 size_t num = 0;
11950 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11951 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11952 transform_elem.numerator =
11953 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11954 transform_elem.denominator =
11955 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11956 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11957 num++;
11958 }
11959 }
11960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11961 colorCorrectTransform)) {
11962 rc = BAD_VALUE;
11963 }
11964 }
11965
11966 cam_trigger_t aecTrigger;
11967 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11968 aecTrigger.trigger_id = -1;
11969 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11970 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11971 aecTrigger.trigger =
11972 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11973 aecTrigger.trigger_id =
11974 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11975 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11976 aecTrigger)) {
11977 rc = BAD_VALUE;
11978 }
11979 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11980 aecTrigger.trigger, aecTrigger.trigger_id);
11981 }
11982
11983 /*af_trigger must come with a trigger id*/
11984 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11985 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11986 cam_trigger_t af_trigger;
11987 af_trigger.trigger =
11988 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11989 af_trigger.trigger_id =
11990 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11992 rc = BAD_VALUE;
11993 }
11994 LOGD("AfTrigger: %d AfTriggerID: %d",
11995 af_trigger.trigger, af_trigger.trigger_id);
11996 }
11997
11998 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11999 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12001 rc = BAD_VALUE;
12002 }
12003 }
12004 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12005 cam_edge_application_t edge_application;
12006 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012007
Thierry Strudel3d639192016-09-09 11:52:26 -070012008 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12009 edge_application.sharpness = 0;
12010 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012011 edge_application.sharpness =
12012 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12013 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12014 int32_t sharpness =
12015 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12016 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12017 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12018 LOGD("Setting edge mode sharpness %d", sharpness);
12019 edge_application.sharpness = sharpness;
12020 }
12021 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 }
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12024 rc = BAD_VALUE;
12025 }
12026 }
12027
12028 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12029 int32_t respectFlashMode = 1;
12030 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12031 uint8_t fwk_aeMode =
12032 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012033 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12034 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12035 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012036 respectFlashMode = 0;
12037 LOGH("AE Mode controls flash, ignore android.flash.mode");
12038 }
12039 }
12040 if (respectFlashMode) {
12041 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12042 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12043 LOGH("flash mode after mapping %d", val);
12044 // To check: CAM_INTF_META_FLASH_MODE usage
12045 if (NAME_NOT_FOUND != val) {
12046 uint8_t flashMode = (uint8_t)val;
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12048 rc = BAD_VALUE;
12049 }
12050 }
12051 }
12052 }
12053
12054 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12055 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12062 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12064 flashFiringTime)) {
12065 rc = BAD_VALUE;
12066 }
12067 }
12068
12069 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12070 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12071 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12072 hotPixelMode)) {
12073 rc = BAD_VALUE;
12074 }
12075 }
12076
12077 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12078 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12080 lensAperture)) {
12081 rc = BAD_VALUE;
12082 }
12083 }
12084
12085 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12086 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12088 filterDensity)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092
12093 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12094 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12096 focalLength)) {
12097 rc = BAD_VALUE;
12098 }
12099 }
12100
12101 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12102 uint8_t optStabMode =
12103 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12105 optStabMode)) {
12106 rc = BAD_VALUE;
12107 }
12108 }
12109
12110 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12111 uint8_t videoStabMode =
12112 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12113 LOGD("videoStabMode from APP = %d", videoStabMode);
12114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12115 videoStabMode)) {
12116 rc = BAD_VALUE;
12117 }
12118 }
12119
12120
12121 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12122 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12124 noiseRedMode)) {
12125 rc = BAD_VALUE;
12126 }
12127 }
12128
12129 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12130 float reprocessEffectiveExposureFactor =
12131 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12133 reprocessEffectiveExposureFactor)) {
12134 rc = BAD_VALUE;
12135 }
12136 }
12137
12138 cam_crop_region_t scalerCropRegion;
12139 bool scalerCropSet = false;
12140 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12141 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12142 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12143 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12144 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12145
12146 // Map coordinate system from active array to sensor output.
12147 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12148 scalerCropRegion.width, scalerCropRegion.height);
12149
12150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12151 scalerCropRegion)) {
12152 rc = BAD_VALUE;
12153 }
12154 scalerCropSet = true;
12155 }
12156
12157 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12158 int64_t sensorExpTime =
12159 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12160 LOGD("setting sensorExpTime %lld", sensorExpTime);
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12162 sensorExpTime)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12168 int64_t sensorFrameDuration =
12169 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012170 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12171 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12172 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12173 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12175 sensorFrameDuration)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
12179
12180 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12181 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12182 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12183 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12184 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12185 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12186 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12188 sensorSensitivity)) {
12189 rc = BAD_VALUE;
12190 }
12191 }
12192
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012193#ifndef USE_HAL_3_3
12194 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12195 int32_t ispSensitivity =
12196 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12197 if (ispSensitivity <
12198 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12199 ispSensitivity =
12200 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12201 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12202 }
12203 if (ispSensitivity >
12204 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12205 ispSensitivity =
12206 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12207 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12208 }
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12210 ispSensitivity)) {
12211 rc = BAD_VALUE;
12212 }
12213 }
12214#endif
12215
Thierry Strudel3d639192016-09-09 11:52:26 -070012216 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12217 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12219 rc = BAD_VALUE;
12220 }
12221 }
12222
12223 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12224 uint8_t fwk_facedetectMode =
12225 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12226
12227 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12228 fwk_facedetectMode);
12229
12230 if (NAME_NOT_FOUND != val) {
12231 uint8_t facedetectMode = (uint8_t)val;
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12233 facedetectMode)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237 }
12238
Thierry Strudel54dc9782017-02-15 12:12:10 -080012239 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012240 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012241 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12243 histogramMode)) {
12244 rc = BAD_VALUE;
12245 }
12246 }
12247
12248 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12249 uint8_t sharpnessMapMode =
12250 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12252 sharpnessMapMode)) {
12253 rc = BAD_VALUE;
12254 }
12255 }
12256
12257 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12258 uint8_t tonemapMode =
12259 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12265 /*All tonemap channels will have the same number of points*/
12266 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12267 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12268 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12269 cam_rgb_tonemap_curves tonemapCurves;
12270 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12271 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12272 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12273 tonemapCurves.tonemap_points_cnt,
12274 CAM_MAX_TONEMAP_CURVE_SIZE);
12275 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12276 }
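        /* Illustrative packing (assuming a 3-point curve): the framework float array
         * [in0, out0, in1, out1, in2, out2] is copied below so that
         * tonemap_points[i][0] = in_i and tonemap_points[i][1] = out_i. */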
12277
12278 /* ch0 = G*/
12279 size_t point = 0;
12280 cam_tonemap_curve_t tonemapCurveGreen;
12281 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12282 for (size_t j = 0; j < 2; j++) {
12283 tonemapCurveGreen.tonemap_points[i][j] =
12284 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12285 point++;
12286 }
12287 }
12288 tonemapCurves.curves[0] = tonemapCurveGreen;
12289
12290 /* ch 1 = B */
12291 point = 0;
12292 cam_tonemap_curve_t tonemapCurveBlue;
12293 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12294 for (size_t j = 0; j < 2; j++) {
12295 tonemapCurveBlue.tonemap_points[i][j] =
12296 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12297 point++;
12298 }
12299 }
12300 tonemapCurves.curves[1] = tonemapCurveBlue;
12301
12302 /* ch 2 = R */
12303 point = 0;
12304 cam_tonemap_curve_t tonemapCurveRed;
12305 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12306 for (size_t j = 0; j < 2; j++) {
12307 tonemapCurveRed.tonemap_points[i][j] =
12308 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12309 point++;
12310 }
12311 }
12312 tonemapCurves.curves[2] = tonemapCurveRed;
12313
12314 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12315 tonemapCurves)) {
12316 rc = BAD_VALUE;
12317 }
12318 }
12319
12320 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12321 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12323 captureIntent)) {
12324 rc = BAD_VALUE;
12325 }
12326 }
12327
12328 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12329 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12331 blackLevelLock)) {
12332 rc = BAD_VALUE;
12333 }
12334 }
12335
12336 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12337 uint8_t lensShadingMapMode =
12338 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12340 lensShadingMapMode)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344
12345 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12346 cam_area_t roi;
12347 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012348 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012349
12350 // Map coordinate system from active array to sensor output.
12351 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12352 roi.rect.height);
12353
12354 if (scalerCropSet) {
12355 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12356 }
12357 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361
12362 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12363 cam_area_t roi;
12364 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012365 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012366
12367 // Map coordinate system from active array to sensor output.
12368 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12369 roi.rect.height);
12370
12371 if (scalerCropSet) {
12372 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12373 }
12374 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12375 rc = BAD_VALUE;
12376 }
12377 }
12378
12379 // CDS for non-HFR non-video mode
12380 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12381 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12382 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12383 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12384 LOGE("Invalid CDS mode %d!", *fwk_cds);
12385 } else {
12386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12387 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12388 rc = BAD_VALUE;
12389 }
12390 }
12391 }
12392
Thierry Strudel04e026f2016-10-10 11:27:36 -070012393 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012394 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012395 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012396 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12397 }
12398 if (m_bVideoHdrEnabled)
12399 vhdr = CAM_VIDEO_HDR_MODE_ON;
12400
Thierry Strudel54dc9782017-02-15 12:12:10 -080012401 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12402
12403 if(vhdr != curr_hdr_state)
12404 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12405
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012406 rc = setVideoHdrMode(mParameters, vhdr);
12407 if (rc != NO_ERROR) {
12408 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012409 }
12410
12411 //IR
12412 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12413 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12414 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012415 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12416 uint8_t isIRon = 0;
12417
12418 (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012419 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12420 LOGE("Invalid IR mode %d!", fwk_ir);
12421 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012422 if(isIRon != curr_ir_state )
12423 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12424
Thierry Strudel04e026f2016-10-10 11:27:36 -070012425 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12426 CAM_INTF_META_IR_MODE, fwk_ir)) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430 }
12431
Thierry Strudel54dc9782017-02-15 12:12:10 -080012432 //Binning Correction Mode
12433 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12434 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12435 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12436 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12437 || (0 > fwk_binning_correction)) {
12438 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12439 } else {
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12441 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445 }
12446
Thierry Strudel269c81a2016-10-12 12:13:59 -070012447 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12448 float aec_speed;
12449 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12450 LOGD("AEC Speed :%f", aec_speed);
12451 if ( aec_speed < 0 ) {
12452 LOGE("Invalid AEC mode %f!", aec_speed);
12453 } else {
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12455 aec_speed)) {
12456 rc = BAD_VALUE;
12457 }
12458 }
12459 }
12460
12461 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12462 float awb_speed;
12463 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12464 LOGD("AWB Speed :%f", awb_speed);
12465 if ( awb_speed < 0 ) {
12466 LOGE("Invalid AWB mode %f!", awb_speed);
12467 } else {
12468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12469 awb_speed)) {
12470 rc = BAD_VALUE;
12471 }
12472 }
12473 }
12474
Thierry Strudel3d639192016-09-09 11:52:26 -070012475 // TNR
12476 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12477 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12478 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012479 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012480 cam_denoise_param_t tnr;
12481 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12482 tnr.process_plates =
12483 (cam_denoise_process_type_t)frame_settings.find(
12484 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12485 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012486
12487 if(b_TnrRequested != curr_tnr_state)
12488 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12489
Thierry Strudel3d639192016-09-09 11:52:26 -070012490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
Thierry Strudel54dc9782017-02-15 12:12:10 -080012495 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012496 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012497 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12499 *exposure_metering_mode)) {
12500 rc = BAD_VALUE;
12501 }
12502 }
12503
Thierry Strudel3d639192016-09-09 11:52:26 -070012504 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12505 int32_t fwk_testPatternMode =
12506 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12507 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12508 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12509
12510 if (NAME_NOT_FOUND != testPatternMode) {
12511 cam_test_pattern_data_t testPatternData;
12512 memset(&testPatternData, 0, sizeof(testPatternData));
12513 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12514 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12515 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12516 int32_t *fwk_testPatternData =
12517 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12518 testPatternData.r = fwk_testPatternData[0];
12519 testPatternData.b = fwk_testPatternData[3];
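                // The framework supplies SOLID_COLOR data as [R, Geven, Godd, B];
                // the switch below maps the two green samples onto Gr/Gb according
                // to the sensor's color filter arrangement.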
12520 switch (gCamCapability[mCameraId]->color_arrangement) {
12521 case CAM_FILTER_ARRANGEMENT_RGGB:
12522 case CAM_FILTER_ARRANGEMENT_GRBG:
12523 testPatternData.gr = fwk_testPatternData[1];
12524 testPatternData.gb = fwk_testPatternData[2];
12525 break;
12526 case CAM_FILTER_ARRANGEMENT_GBRG:
12527 case CAM_FILTER_ARRANGEMENT_BGGR:
12528 testPatternData.gr = fwk_testPatternData[2];
12529 testPatternData.gb = fwk_testPatternData[1];
12530 break;
12531 default:
12532 LOGE("color arrangement %d is not supported",
12533 gCamCapability[mCameraId]->color_arrangement);
12534 break;
12535 }
12536 }
12537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12538 testPatternData)) {
12539 rc = BAD_VALUE;
12540 }
12541 } else {
12542 LOGE("Invalid framework sensor test pattern mode %d",
12543 fwk_testPatternMode);
12544 }
12545 }
12546
12547 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12548 size_t count = 0;
12549 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12550 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12551 gps_coords.data.d, gps_coords.count, count);
12552 if (gps_coords.count != count) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12558 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12559 size_t count = 0;
12560 const char *gps_methods_src = (const char *)
12561 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12562 memset(gps_methods, '\0', sizeof(gps_methods));
12563 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12564 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12565 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12566 if (GPS_PROCESSING_METHOD_SIZE != count) {
12567 rc = BAD_VALUE;
12568 }
12569 }
12570
12571 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12572 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12574 gps_timestamp)) {
12575 rc = BAD_VALUE;
12576 }
12577 }
12578
12579 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12580 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12581 cam_rotation_info_t rotation_info;
12582 if (orientation == 0) {
12583 rotation_info.rotation = ROTATE_0;
12584 } else if (orientation == 90) {
12585 rotation_info.rotation = ROTATE_90;
12586 } else if (orientation == 180) {
12587 rotation_info.rotation = ROTATE_180;
12588 } else if (orientation == 270) {
12589 rotation_info.rotation = ROTATE_270;
12590 }
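        // android.jpeg.orientation is constrained by the framework to 0, 90, 180 or
        // 270 degrees, so the chain above covers all legal values.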
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012591 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012592 rotation_info.streamId = snapshotStreamId;
12593 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598
12599 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12600 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605
12606 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12607 uint32_t thumb_quality = (uint32_t)
12608 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12610 thumb_quality)) {
12611 rc = BAD_VALUE;
12612 }
12613 }
12614
12615 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12616 cam_dimension_t dim;
12617 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12618 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12620 rc = BAD_VALUE;
12621 }
12622 }
12623
12624 // Internal metadata
12625 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12626 size_t count = 0;
12627 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12628 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12629 privatedata.data.i32, privatedata.count, count);
12630 if (privatedata.count != count) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012635 // ISO/Exposure Priority
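    // Illustrative use of these vendor tags: QCAMERA3_SELECT_PRIORITY chooses between
    // ISO priority and exposure-time priority, QCAMERA3_USE_ISO_EXP_PRIORITY carries the
    // corresponding manual value, and ZSL is switched on only while a priority mode is
    // active (and switched off when the tags are absent).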
12636 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12637 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12638 cam_priority_mode_t mode =
12639 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12640 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12641 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12642 use_iso_exp_pty.previewOnly = FALSE;
12643 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12644 use_iso_exp_pty.value = *ptr;
12645
12646 if(CAM_ISO_PRIORITY == mode) {
12647 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12648 use_iso_exp_pty)) {
12649 rc = BAD_VALUE;
12650 }
12651 }
12652 else {
12653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12654 use_iso_exp_pty)) {
12655 rc = BAD_VALUE;
12656 }
12657 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012658
12659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12660 rc = BAD_VALUE;
12661 }
12662 }
12663 } else {
12664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12665 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012666 }
12667 }
12668
12669 // Saturation
12670 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12671 int32_t* use_saturation =
12672 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12674 rc = BAD_VALUE;
12675 }
12676 }
12677
Thierry Strudel3d639192016-09-09 11:52:26 -070012678 // EV step
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12680 gCamCapability[mCameraId]->exp_compensation_step)) {
12681 rc = BAD_VALUE;
12682 }
12683
12684 // CDS info
12685 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12686 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12687 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12688
12689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12690 CAM_INTF_META_CDS_DATA, *cdsData)) {
12691 rc = BAD_VALUE;
12692 }
12693 }
12694
Shuzhen Wang19463d72016-03-08 11:09:52 -080012695 // Hybrid AE
12696 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12697 uint8_t *hybrid_ae = (uint8_t *)
12698 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12699
12700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12701 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12702 rc = BAD_VALUE;
12703 }
12704 }
12705
Shuzhen Wang14415f52016-11-16 18:26:18 -080012706 // Histogram
12707 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12708 uint8_t histogramMode =
12709 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12711 histogramMode)) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
12716 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12717 int32_t histogramBins =
12718 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12720 histogramBins)) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012725 // Tracking AF
12726 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12727 uint8_t trackingAfTrigger =
12728 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12730 trackingAfTrigger)) {
12731 rc = BAD_VALUE;
12732 }
12733 }
12734
Thierry Strudel3d639192016-09-09 11:52:26 -070012735 return rc;
12736}
12737
12738/*===========================================================================
12739 * FUNCTION : captureResultCb
12740 *
12741 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12742 *
12743 * PARAMETERS :
12744 * @frame : frame information from mm-camera-interface
12745 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12746 * @userdata: userdata
12747 *
12748 * RETURN : NONE
12749 *==========================================================================*/
12750void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12751 camera3_stream_buffer_t *buffer,
12752 uint32_t frame_number, bool isInputBuffer, void *userdata)
12753{
12754 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12755 if (hw == NULL) {
12756 LOGE("Invalid hw %p", hw);
12757 return;
12758 }
12759
12760 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12761 return;
12762}
12763
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012764/*===========================================================================
12765 * FUNCTION : setBufferErrorStatus
12766 *
12767 * DESCRIPTION: Callback handler for channels to report any buffer errors
12768 *
12769 * PARAMETERS :
12770 * @ch : Channel on which buffer error is reported from
12771 * @frame_number : frame number on which buffer error is reported on
12772 * @buffer_status : buffer error status
12773 * @userdata: userdata
12774 *
12775 * RETURN : NONE
12776 *==========================================================================*/
12777void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12778 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12779{
12780 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12781 if (hw == NULL) {
12782 LOGE("Invalid hw %p", hw);
12783 return;
12784 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012785
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012786 hw->setBufferErrorStatus(ch, frame_number, err);
12787 return;
12788}
12789
12790void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12791 uint32_t frameNumber, camera3_buffer_status_t err)
12792{
12793 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12794 pthread_mutex_lock(&mMutex);
12795
12796 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12797 if (req.frame_number != frameNumber)
12798 continue;
12799 for (auto& k : req.mPendingBufferList) {
12800 if(k.stream->priv == ch) {
12801 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12802 }
12803 }
12804 }
12805
12806 pthread_mutex_unlock(&mMutex);
12807 return;
12808}
Thierry Strudel3d639192016-09-09 11:52:26 -070012809/*===========================================================================
12810 * FUNCTION : initialize
12811 *
12812 * DESCRIPTION: Pass framework callback pointers to HAL
12813 *
12814 * PARAMETERS :
12815 *
12816 *
12817 * RETURN : Success : 0
12818 * Failure: -ENODEV
12819 *==========================================================================*/
12820
12821int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12822 const camera3_callback_ops_t *callback_ops)
12823{
12824 LOGD("E");
12825 QCamera3HardwareInterface *hw =
12826 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12827 if (!hw) {
12828 LOGE("NULL camera device");
12829 return -ENODEV;
12830 }
12831
12832 int rc = hw->initialize(callback_ops);
12833 LOGD("X");
12834 return rc;
12835}
12836
12837/*===========================================================================
12838 * FUNCTION : configure_streams
12839 *
12840 * DESCRIPTION: Validate the device handle and forward the stream configuration to configureStreams()
12841 *
12842 * PARAMETERS :
12843 *
12844 *
12845 * RETURN : Success: 0
12846 * Failure: -EINVAL (if stream configuration is invalid)
12847 * -ENODEV (fatal error)
12848 *==========================================================================*/
12849
12850int QCamera3HardwareInterface::configure_streams(
12851 const struct camera3_device *device,
12852 camera3_stream_configuration_t *stream_list)
12853{
12854 LOGD("E");
12855 QCamera3HardwareInterface *hw =
12856 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12857 if (!hw) {
12858 LOGE("NULL camera device");
12859 return -ENODEV;
12860 }
12861 int rc = hw->configureStreams(stream_list);
12862 LOGD("X");
12863 return rc;
12864}
12865
12866/*===========================================================================
12867 * FUNCTION : construct_default_request_settings
12868 *
12869 * DESCRIPTION: Configure a settings buffer to meet the required use case
12870 *
12871 * PARAMETERS :
12872 *
12873 *
12874 * RETURN : Success: Return valid metadata
12875 * Failure: Return NULL
12876 *==========================================================================*/
12877const camera_metadata_t* QCamera3HardwareInterface::
12878 construct_default_request_settings(const struct camera3_device *device,
12879 int type)
12880{
12881
12882 LOGD("E");
12883 camera_metadata_t* fwk_metadata = NULL;
12884 QCamera3HardwareInterface *hw =
12885 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12886 if (!hw) {
12887 LOGE("NULL camera device");
12888 return NULL;
12889 }
12890
12891 fwk_metadata = hw->translateCapabilityToMetadata(type);
12892
12893 LOGD("X");
12894 return fwk_metadata;
12895}
12896
12897/*===========================================================================
12898 * FUNCTION : process_capture_request
12899 *
12900 * DESCRIPTION: Validate the device handle and forward the capture request to orchestrateRequest()
12901 *
12902 * PARAMETERS :
12903 *
12904 *
12905 * RETURN :
12906 *==========================================================================*/
12907int QCamera3HardwareInterface::process_capture_request(
12908 const struct camera3_device *device,
12909 camera3_capture_request_t *request)
12910{
12911 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012912 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012913 QCamera3HardwareInterface *hw =
12914 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12915 if (!hw) {
12916 LOGE("NULL camera device");
12917 return -EINVAL;
12918 }
12919
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012920 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012921 LOGD("X");
12922 return rc;
12923}
12924
12925/*===========================================================================
12926 * FUNCTION : dump
12927 *
12928 * DESCRIPTION: Dump HAL state for the given camera device to the supplied file descriptor
12929 *
12930 * PARAMETERS :
12931 *
12932 *
12933 * RETURN :
12934 *==========================================================================*/
12935
12936void QCamera3HardwareInterface::dump(
12937 const struct camera3_device *device, int fd)
12938{
12939 /* Log level property is read when "adb shell dumpsys media.camera" is
12940 called so that the log level can be controlled without restarting
12941 the media server */
12942 getLogLevel();
12943
12944 LOGD("E");
12945 QCamera3HardwareInterface *hw =
12946 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12947 if (!hw) {
12948 LOGE("NULL camera device");
12949 return;
12950 }
12951
12952 hw->dump(fd);
12953 LOGD("X");
12954 return;
12955}
12956
12957/*===========================================================================
12958 * FUNCTION : flush
12959 *
12960 * DESCRIPTION: Flush in-flight captures; a full flush (with channel restart) is performed only in the STARTED state
12961 *
12962 * PARAMETERS :
12963 *
12964 *
12965 * RETURN :
12966 *==========================================================================*/
12967
12968int QCamera3HardwareInterface::flush(
12969 const struct camera3_device *device)
12970{
12971 int rc;
12972 LOGD("E");
12973 QCamera3HardwareInterface *hw =
12974 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12975 if (!hw) {
12976 LOGE("NULL camera device");
12977 return -EINVAL;
12978 }
12979
12980 pthread_mutex_lock(&hw->mMutex);
12981 // Validate current state
12982 switch (hw->mState) {
12983 case STARTED:
12984 /* valid state */
12985 break;
12986
12987 case ERROR:
12988 pthread_mutex_unlock(&hw->mMutex);
12989 hw->handleCameraDeviceError();
12990 return -ENODEV;
12991
12992 default:
12993 LOGI("Flush returned during state %d", hw->mState);
12994 pthread_mutex_unlock(&hw->mMutex);
12995 return 0;
12996 }
12997 pthread_mutex_unlock(&hw->mMutex);
12998
12999 rc = hw->flush(true /* restart channels */ );
13000 LOGD("X");
13001 return rc;
13002}
13003
13004/*===========================================================================
13005 * FUNCTION : close_camera_device
13006 *
13007 * DESCRIPTION: Close the camera device and free the HAL instance
13008 *
13009 * PARAMETERS :
13010 *
13011 *
13012 * RETURN :
13013 *==========================================================================*/
13014int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13015{
13016 int ret = NO_ERROR;
13017 QCamera3HardwareInterface *hw =
13018 reinterpret_cast<QCamera3HardwareInterface *>(
13019 reinterpret_cast<camera3_device_t *>(device)->priv);
13020 if (!hw) {
13021 LOGE("NULL camera device");
13022 return BAD_VALUE;
13023 }
13024
13025 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13026 delete hw;
13027 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013028 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013029 return ret;
13030}
13031
13032/*===========================================================================
13033 * FUNCTION : getWaveletDenoiseProcessPlate
13034 *
13035 * DESCRIPTION: query wavelet denoise process plate
13036 *
13037 * PARAMETERS : None
13038 *
13039 * RETURN : WNR process plate value
13040 *==========================================================================*/
13041cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13042{
13043 char prop[PROPERTY_VALUE_MAX];
13044 memset(prop, 0, sizeof(prop));
13045 property_get("persist.denoise.process.plates", prop, "0");
13046 int processPlate = atoi(prop);
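    // Example: `adb shell setprop persist.denoise.process.plates 2` selects the
    // streamlined YCbCr plate; unrecognized values fall back to
    // CAM_WAVELET_DENOISE_STREAMLINE_YCBCR (see the switch below).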
13047 switch(processPlate) {
13048 case 0:
13049 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13050 case 1:
13051 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13052 case 2:
13053 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13054 case 3:
13055 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13056 default:
13057 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13058 }
13059}
13060
13061
13062/*===========================================================================
13063 * FUNCTION : getTemporalDenoiseProcessPlate
13064 *
13065 * DESCRIPTION: query temporal denoise process plate
13066 *
13067 * PARAMETERS : None
13068 *
13069 * RETURN : TNR process plate value
13070 *==========================================================================*/
13071cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13072{
13073 char prop[PROPERTY_VALUE_MAX];
13074 memset(prop, 0, sizeof(prop));
13075 property_get("persist.tnr.process.plates", prop, "0");
13076 int processPlate = atoi(prop);
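    // Same property-to-plate mapping as getWaveletDenoiseProcessPlate(), but driven by
    // persist.tnr.process.plates for temporal noise reduction.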
13077 switch(processPlate) {
13078 case 0:
13079 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13080 case 1:
13081 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13082 case 2:
13083 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13084 case 3:
13085 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13086 default:
13087 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13088 }
13089}
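
// Usage sketch (illustrative, assumes a debug build with adb setprop access):
// both plate selectors above read an integer property and map it onto
// cam_denoise_process_type_t, falling back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
// for unrecognized values. For example:
//
//     adb shell setprop persist.denoise.process.plates 1   // WNR -> CAM_WAVELET_DENOISE_CBCR_ONLY
//     adb shell setprop persist.tnr.process.plates 2       // TNR -> CAM_WAVELET_DENOISE_STREAMLINE_YCBCR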
13090
13091
13092/*===========================================================================
13093 * FUNCTION : extractSceneMode
13094 *
13095 * DESCRIPTION: Extract scene mode from frameworks set metadata
13096 *
13097 * PARAMETERS :
13098 * @frame_settings: CameraMetadata reference
13099 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13100 * @hal_metadata: hal metadata structure
13101 *
13102 * RETURN : int32_t status; NO_ERROR on success, non-zero on failure
13103 *==========================================================================*/
13104int32_t QCamera3HardwareInterface::extractSceneMode(
13105 const CameraMetadata &frame_settings, uint8_t metaMode,
13106 metadata_buffer_t *hal_metadata)
13107{
13108 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013109 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13110
13111 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13112 LOGD("Ignoring control mode OFF_KEEP_STATE");
13113 return NO_ERROR;
13114 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013115
13116 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13117 camera_metadata_ro_entry entry =
13118 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13119 if (0 == entry.count)
13120 return rc;
13121
13122 uint8_t fwk_sceneMode = entry.data.u8[0];
13123
13124 int val = lookupHalName(SCENE_MODES_MAP,
13125 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13126 fwk_sceneMode);
13127 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013128 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013129 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013130 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013131 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013132
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013133 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13134 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13135 }
13136
13137 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13138 if (sceneMode == CAM_SCENE_MODE_HDR) { // sceneMode is the HAL enum mapped above
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013139 cam_hdr_param_t hdr_params;
13140 hdr_params.hdr_enable = 1;
13141 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13142 hdr_params.hdr_need_1x = false;
13143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13144 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13145 rc = BAD_VALUE;
13146 }
13147 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013148
Thierry Strudel3d639192016-09-09 11:52:26 -070013149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13150 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13151 rc = BAD_VALUE;
13152 }
13153 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013154
13155 if (mForceHdrSnapshot) {
13156 cam_hdr_param_t hdr_params;
13157 hdr_params.hdr_enable = 1;
13158 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13159 hdr_params.hdr_need_1x = false;
13160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13161 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13162 rc = BAD_VALUE;
13163 }
13164 }
13165
Thierry Strudel3d639192016-09-09 11:52:26 -070013166 return rc;
13167}
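
// Flow sketch (illustrative summary of the logic above): for a framework request
// with ANDROID_CONTROL_MODE == USE_SCENE_MODE and ANDROID_CONTROL_SCENE_MODE == HDR,
//   1) the framework scene mode is mapped to the HAL enum via SCENE_MODES_MAP,
//   2) sensor HDR is programmed through setSensorHDR() when the scene mode is HDR
//      (or when sensor HDR was previously left enabled),
//   3) when sensor HDR is not in use, multi-frame HAL bracketing is requested via
//      CAM_INTF_PARM_HAL_BRACKETING_HDR and the scene mode is forwarded as
//      CAM_INTF_PARM_BESTSHOT_MODE; mForceHdrSnapshot forces bracketing regardless.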
13168
13169/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013170 * FUNCTION : setVideoHdrMode
13171 *
13172 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13173 *
13174 * PARAMETERS :
13175 * @hal_metadata: hal metadata structure
13176 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13177 *
13178 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid video HDR mode
13179 *==========================================================================*/
13180int32_t QCamera3HardwareInterface::setVideoHdrMode(
13181 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13182{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013183 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13184 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13185 }
13186
13187 LOGE("Invalid Video HDR mode %d!", vhdr);
13188 return BAD_VALUE;
13189}
13190
13191/*===========================================================================
13192 * FUNCTION : setSensorHDR
13193 *
13194 * DESCRIPTION: Enable/disable sensor HDR.
13195 *
13196 * PARAMETERS :
13197 * @hal_metadata: hal metadata structure
13198 * @enable: boolean whether to enable/disable sensor HDR
13199 * @isVideoHdrEnable: true when invoked for video HDR mode
13200 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13201 *==========================================================================*/
13202int32_t QCamera3HardwareInterface::setSensorHDR(
13203 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13204{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013205 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013206 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13207
13208 if (enable) {
13209 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13210 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13211 #ifdef _LE_CAMERA_
13212 // Default to staggered HDR for IoT
13213 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13214 #else
13215 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13216 #endif
13217 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13218 }
13219
13220 bool isSupported = false;
13221 switch (sensor_hdr) {
13222 case CAM_SENSOR_HDR_IN_SENSOR:
13223 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13224 CAM_QCOM_FEATURE_SENSOR_HDR) {
13225 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013226 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013227 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013228 break;
13229 case CAM_SENSOR_HDR_ZIGZAG:
13230 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13231 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13232 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013233 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013234 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013235 break;
13236 case CAM_SENSOR_HDR_STAGGERED:
13237 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13238 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13239 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013240 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013241 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013242 break;
13243 case CAM_SENSOR_HDR_OFF:
13244 isSupported = true;
13245 LOGD("Turning off sensor HDR");
13246 break;
13247 default:
13248 LOGE("HDR mode %d not supported", sensor_hdr);
13249 rc = BAD_VALUE;
13250 break;
13251 }
13252
13253 if(isSupported) {
13254 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13255 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13256 rc = BAD_VALUE;
13257 } else {
13258 if(!isVideoHdrEnable)
13259 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013260 }
13261 }
13262 return rc;
13263}
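
// Usage sketch (illustrative): the sensor HDR flavour comes from the
// persist.camera.sensor.hdr property, which is cast directly to
// cam_sensor_hdr_type_t and validated against the capability mask. For example,
// the IoT build above defaults the property to "3" (staggered HDR), which is
// only applied when CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR is advertised.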
13264
13265/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013266 * FUNCTION : needRotationReprocess
13267 *
13268 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13269 *
13270 * PARAMETERS : none
13271 *
13272 * RETURN : true: needed
13273 * false: no need
13274 *==========================================================================*/
13275bool QCamera3HardwareInterface::needRotationReprocess()
13276{
13277 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13278 // pp has the capability to process rotation, so use reprocess for it
13279 LOGH("need do reprocess for rotation");
13280 return true;
13281 }
13282
13283 return false;
13284}
13285
13286/*===========================================================================
13287 * FUNCTION : needReprocess
13288 *
13289 * DESCRIPTION: check whether reprocess is needed
13290 *
13291 * PARAMETERS : none
13292 *
13293 * RETURN : true: needed
13294 * false: no need
13295 *==========================================================================*/
13296bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13297{
13298 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13299 // TODO: add for ZSL HDR later
13300 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13301 if (postprocess_mask == CAM_QCOM_FEATURE_NONE) {
13302 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13303 return true;
13304 } else {
13305 LOGH("already post processed frame");
13306 return false;
13307 }
13308 }
13309 return needRotationReprocess();
13310}
13311
13312/*===========================================================================
13313 * FUNCTION : needJpegExifRotation
13314 *
13315 * DESCRIPTION: if rotation from jpeg is needed
13316 *
13317 * PARAMETERS : none
13318 *
13319 * RETURN : true: needed
13320 * false: no need
13321 *==========================================================================*/
13322bool QCamera3HardwareInterface::needJpegExifRotation()
13323{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013324 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013325 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13326 LOGD("Need use Jpeg EXIF Rotation");
13327 return true;
13328 }
13329 return false;
13330}
13331
13332/*===========================================================================
13333 * FUNCTION : addOfflineReprocChannel
13334 *
13335 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13336 * coming from input channel
13337 *
13338 * PARAMETERS :
13339 * @config : reprocess configuration
13340 * @inputChHandle : pointer to the input (source) channel
13341 *
13342 *
13343 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13344 *==========================================================================*/
13345QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13346 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13347{
13348 int32_t rc = NO_ERROR;
13349 QCamera3ReprocessChannel *pChannel = NULL;
13350
13351 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013352 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13353 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013354 if (NULL == pChannel) {
13355 LOGE("no mem for reprocess channel");
13356 return NULL;
13357 }
13358
13359 rc = pChannel->initialize(IS_TYPE_NONE);
13360 if (rc != NO_ERROR) {
13361 LOGE("init reprocess channel failed, ret = %d", rc);
13362 delete pChannel;
13363 return NULL;
13364 }
13365
13366 // pp feature config
13367 cam_pp_feature_config_t pp_config;
13368 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13369
13370 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13371 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13372 & CAM_QCOM_FEATURE_DSDN) {
13373 // Use CPP CDS in case h/w supports it.
13374 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13375 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13376 }
13377 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13378 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13379 }
13380
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013381 if (config.hdr_param.hdr_enable) {
13382 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13383 pp_config.hdr_param = config.hdr_param;
13384 }
13385
13386 if (mForceHdrSnapshot) {
13387 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13388 pp_config.hdr_param.hdr_enable = 1;
13389 pp_config.hdr_param.hdr_need_1x = 0;
13390 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13391 }
13392
Thierry Strudel3d639192016-09-09 11:52:26 -070013393 rc = pChannel->addReprocStreamsFromSource(pp_config,
13394 config,
13395 IS_TYPE_NONE,
13396 mMetadataChannel);
13397
13398 if (rc != NO_ERROR) {
13399 delete pChannel;
13400 return NULL;
13401 }
13402 return pChannel;
13403}
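
// Call sketch (illustrative, simplified): a typical offline reprocess path builds
// a reprocess_config_t (padding, optional hdr_param) and creates the channel from
// the source processing channel; cfg and srcChannel are assumed to be set up by
// the caller:
//
//     QCamera3ReprocessChannel *reproc = addOfflineReprocChannel(cfg, srcChannel);
//     if (reproc == NULL) {
//         // channel allocation or initialization failed
//     }
//
// The feature mask always includes CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 and is
// adjusted above for CDS-vs-DSDN, rotation capability and HDR.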
13404
13405/*===========================================================================
13406 * FUNCTION : getMobicatMask
13407 *
13408 * DESCRIPTION: returns mobicat mask
13409 *
13410 * PARAMETERS : none
13411 *
13412 * RETURN : mobicat mask
13413 *
13414 *==========================================================================*/
13415uint8_t QCamera3HardwareInterface::getMobicatMask()
13416{
13417 return m_MobicatMask;
13418}
13419
13420/*===========================================================================
13421 * FUNCTION : setMobicat
13422 *
13423 * DESCRIPTION: set Mobicat on/off.
13424 *
13425 * PARAMETERS :
13426 * @params : none
13427 *
13428 * RETURN : int32_t type of status
13429 * NO_ERROR -- success
13430 * non-zero failure code
13431 *==========================================================================*/
13432int32_t QCamera3HardwareInterface::setMobicat()
13433{
13434 char value [PROPERTY_VALUE_MAX];
13435 property_get("persist.camera.mobicat", value, "0");
13436 int32_t ret = NO_ERROR;
13437 uint8_t enableMobi = (uint8_t)atoi(value);
13438
13439 if (enableMobi) {
13440 tune_cmd_t tune_cmd;
13441 tune_cmd.type = SET_RELOAD_CHROMATIX;
13442 tune_cmd.module = MODULE_ALL;
13443 tune_cmd.value = TRUE;
13444 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13445 CAM_INTF_PARM_SET_VFE_COMMAND,
13446 tune_cmd);
13447
13448 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13449 CAM_INTF_PARM_SET_PP_COMMAND,
13450 tune_cmd);
13451 }
13452 m_MobicatMask = enableMobi;
13453
13454 return ret;
13455}
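
// Usage sketch (illustrative): Mobicat tuning data is controlled purely by a
// property; with it set, the next setMobicat() call batches a SET_RELOAD_CHROMATIX
// tune command for all modules to both the VFE and the PP:
//
//     adb shell setprop persist.camera.mobicat 1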
13456
13457/*===========================================================================
13458* FUNCTION : getLogLevel
13459*
13460* DESCRIPTION: Reads the log level property into a variable
13461*
13462* PARAMETERS :
13463* None
13464*
13465* RETURN :
13466* None
13467*==========================================================================*/
13468void QCamera3HardwareInterface::getLogLevel()
13469{
13470 char prop[PROPERTY_VALUE_MAX];
13471 uint32_t globalLogLevel = 0;
13472
13473 property_get("persist.camera.hal.debug", prop, "0");
13474 int val = atoi(prop);
13475 if (0 <= val) {
13476 gCamHal3LogLevel = (uint32_t)val;
13477 }
13478
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013479 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013480 gKpiDebugLevel = atoi(prop);
13481
13482 property_get("persist.camera.global.debug", prop, "0");
13483 val = atoi(prop);
13484 if (0 <= val) {
13485 globalLogLevel = (uint32_t)val;
13486 }
13487
13488 /* Highest log level among hal.logs and global.logs is selected */
13489 if (gCamHal3LogLevel < globalLogLevel)
13490 gCamHal3LogLevel = globalLogLevel;
13491
13492 return;
13493}
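
// Usage sketch (illustrative): the effective HAL verbosity is the maximum of the
// per-HAL and global properties, so either of the following raises
// gCamHal3LogLevel to 2:
//
//     adb shell setprop persist.camera.hal.debug 2
//     adb shell setprop persist.camera.global.debug 2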
13494
13495/*===========================================================================
13496 * FUNCTION : validateStreamRotations
13497 *
13498 * DESCRIPTION: Check if the rotations requested are supported
13499 *
13500 * PARAMETERS :
13501 * @stream_list : streams to be configured
13502 *
13503 * RETURN : NO_ERROR on success
13504 * -EINVAL on failure
13505 *
13506 *==========================================================================*/
13507int QCamera3HardwareInterface::validateStreamRotations(
13508 camera3_stream_configuration_t *streamList)
13509{
13510 int rc = NO_ERROR;
13511
13512 /*
13513 * Loop through all streams requested in configuration
13514 * Check if unsupported rotations have been requested on any of them
13515 */
13516 for (size_t j = 0; j < streamList->num_streams; j++){
13517 camera3_stream_t *newStream = streamList->streams[j];
13518
13519 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13520 bool isImplDef = (newStream->format ==
13521 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13522 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13523 isImplDef);
13524
13525 if (isRotated && (!isImplDef || isZsl)) {
13526 LOGE("Error: Unsupported rotation of %d requested for stream"
13527 "type:%d and stream format:%d",
13528 newStream->rotation, newStream->stream_type,
13529 newStream->format);
13530 rc = -EINVAL;
13531 break;
13532 }
13533 }
13534
13535 return rc;
13536}
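
// Example (illustrative): a configuration requesting CAMERA3_STREAM_ROTATION_90 on
// a HAL_PIXEL_FORMAT_BLOB (JPEG) stream, or on a bidirectional (ZSL)
// implementation-defined stream, is rejected with -EINVAL; rotation is only
// accepted on non-ZSL implementation-defined streams.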
13537
13538/*===========================================================================
13539* FUNCTION : getFlashInfo
13540*
13541* DESCRIPTION: Retrieve information about whether the device has a flash.
13542*
13543* PARAMETERS :
13544* @cameraId : Camera id to query
13545* @hasFlash : Boolean indicating whether there is a flash device
13546* associated with given camera
13547* @flashNode : If a flash device exists, this will be its device node.
13548*
13549* RETURN :
13550* None
13551*==========================================================================*/
13552void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13553 bool& hasFlash,
13554 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13555{
13556 cam_capability_t* camCapability = gCamCapability[cameraId];
13557 if (NULL == camCapability) {
13558 hasFlash = false;
13559 flashNode[0] = '\0';
13560 } else {
13561 hasFlash = camCapability->flash_available;
13562 strlcpy(flashNode,
13563 (char*)camCapability->flash_dev_name,
13564 QCAMERA_MAX_FILEPATH_LENGTH);
13565 }
13566}
13567
13568/*===========================================================================
13569* FUNCTION : getEepromVersionInfo
13570*
13571* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13572*
13573* PARAMETERS : None
13574*
13575* RETURN : string describing EEPROM version
13576* "\0" if no such info available
13577*==========================================================================*/
13578const char *QCamera3HardwareInterface::getEepromVersionInfo()
13579{
13580 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13581}
13582
13583/*===========================================================================
13584* FUNCTION : getLdafCalib
13585*
13586* DESCRIPTION: Retrieve Laser AF calibration data
13587*
13588* PARAMETERS : None
13589*
13590* RETURN : Two uint32_t describing laser AF calibration data
13591* NULL if none is available.
13592*==========================================================================*/
13593const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13594{
13595 if (mLdafCalibExist) {
13596 return &mLdafCalib[0];
13597 } else {
13598 return NULL;
13599 }
13600}
13601
13602/*===========================================================================
13603 * FUNCTION : dynamicUpdateMetaStreamInfo
13604 *
13605 * DESCRIPTION: This function:
13606 * (1) stops all the channels
13607 * (2) returns error on pending requests and buffers
13608 * (3) sends metastream_info in setparams
13609 * (4) starts all channels
13610 * This is useful when sensor has to be restarted to apply any
13611 * settings such as frame rate from a different sensor mode
13612 *
13613 * PARAMETERS : None
13614 *
13615 * RETURN : NO_ERROR on success
13616 * Error codes on failure
13617 *
13618 *==========================================================================*/
13619int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13620{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013621 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013622 int rc = NO_ERROR;
13623
13624 LOGD("E");
13625
13626 rc = stopAllChannels();
13627 if (rc < 0) {
13628 LOGE("stopAllChannels failed");
13629 return rc;
13630 }
13631
13632 rc = notifyErrorForPendingRequests();
13633 if (rc < 0) {
13634 LOGE("notifyErrorForPendingRequests failed");
13635 return rc;
13636 }
13637
13638 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13639 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13640 "Format:%d",
13641 mStreamConfigInfo.type[i],
13642 mStreamConfigInfo.stream_sizes[i].width,
13643 mStreamConfigInfo.stream_sizes[i].height,
13644 mStreamConfigInfo.postprocess_mask[i],
13645 mStreamConfigInfo.format[i]);
13646 }
13647
13648 /* Send meta stream info once again so that ISP can start */
13649 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13650 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13651 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13652 mParameters);
13653 if (rc < 0) {
13654 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13655 }
13656
13657 rc = startAllChannels();
13658 if (rc < 0) {
13659 LOGE("startAllChannels failed");
13660 return rc;
13661 }
13662
13663 LOGD("X");
13664 return rc;
13665}
13666
13667/*===========================================================================
13668 * FUNCTION : stopAllChannels
13669 *
13670 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13671 *
13672 * PARAMETERS : None
13673 *
13674 * RETURN : NO_ERROR on success
13675 * Error codes on failure
13676 *
13677 *==========================================================================*/
13678int32_t QCamera3HardwareInterface::stopAllChannels()
13679{
13680 int32_t rc = NO_ERROR;
13681
13682 LOGD("Stopping all channels");
13683 // Stop the Streams/Channels
13684 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13685 it != mStreamInfo.end(); it++) {
13686 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13687 if (channel) {
13688 channel->stop();
13689 }
13690 (*it)->status = INVALID;
13691 }
13692
13693 if (mSupportChannel) {
13694 mSupportChannel->stop();
13695 }
13696 if (mAnalysisChannel) {
13697 mAnalysisChannel->stop();
13698 }
13699 if (mRawDumpChannel) {
13700 mRawDumpChannel->stop();
13701 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013702 if (mHdrPlusRawSrcChannel) {
13703 mHdrPlusRawSrcChannel->stop();
13704 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013705 if (mMetadataChannel) {
13706 /* If content of mStreamInfo is not 0, there is metadata stream */
13707 mMetadataChannel->stop();
13708 }
13709
13710 LOGD("All channels stopped");
13711 return rc;
13712}
13713
13714/*===========================================================================
13715 * FUNCTION : startAllChannels
13716 *
13717 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13718 *
13719 * PARAMETERS : None
13720 *
13721 * RETURN : NO_ERROR on success
13722 * Error codes on failure
13723 *
13724 *==========================================================================*/
13725int32_t QCamera3HardwareInterface::startAllChannels()
13726{
13727 int32_t rc = NO_ERROR;
13728
13729 LOGD("Start all channels ");
13730 // Start the Streams/Channels
13731 if (mMetadataChannel) {
13732 /* If content of mStreamInfo is not 0, there is metadata stream */
13733 rc = mMetadataChannel->start();
13734 if (rc < 0) {
13735 LOGE("META channel start failed");
13736 return rc;
13737 }
13738 }
13739 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13740 it != mStreamInfo.end(); it++) {
13741 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13742 if (channel) {
13743 rc = channel->start();
13744 if (rc < 0) {
13745 LOGE("channel start failed");
13746 return rc;
13747 }
13748 }
13749 }
13750 if (mAnalysisChannel) {
13751 mAnalysisChannel->start();
13752 }
13753 if (mSupportChannel) {
13754 rc = mSupportChannel->start();
13755 if (rc < 0) {
13756 LOGE("Support channel start failed");
13757 return rc;
13758 }
13759 }
13760 if (mRawDumpChannel) {
13761 rc = mRawDumpChannel->start();
13762 if (rc < 0) {
13763 LOGE("RAW dump channel start failed");
13764 return rc;
13765 }
13766 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013767 if (mHdrPlusRawSrcChannel) {
13768 rc = mHdrPlusRawSrcChannel->start();
13769 if (rc < 0) {
13770 LOGE("HDR+ RAW channel start failed");
13771 return rc;
13772 }
13773 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013774
13775 LOGD("All channels started");
13776 return rc;
13777}
13778
13779/*===========================================================================
13780 * FUNCTION : notifyErrorForPendingRequests
13781 *
13782 * DESCRIPTION: This function sends error for all the pending requests/buffers
13783 *
13784 * PARAMETERS : None
13785 *
13786 * RETURN : Error codes
13787 * NO_ERROR on success
13788 *
13789 *==========================================================================*/
13790int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13791{
Emilian Peev7650c122017-01-19 08:24:33 -080013792 notifyErrorFoPendingDepthData(mDepthChannel);
13793
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013794 auto pendingRequest = mPendingRequestsList.begin();
13795 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013796
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013797 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13798 // buffers (for which buffers aren't sent yet).
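    // Summary: the loop below handles three interleavings:
    //   1) buffers pending, result metadata already sent -> per-buffer ERROR_BUFFER
    //   2) result metadata pending, buffers already sent -> ERROR_RESULT for the frame
    //   3) both pending -> ERROR_REQUEST plus error status on every buffer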
13799 while (pendingRequest != mPendingRequestsList.end() ||
13800 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13801 if (pendingRequest == mPendingRequestsList.end() ||
13802 pendingBuffer->frame_number < pendingRequest->frame_number) {
13803 // If metadata for this frame was sent, notify about a buffer error and return buffers
13804 // with error.
13805 for (auto &info : pendingBuffer->mPendingBufferList) {
13806 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013807 camera3_notify_msg_t notify_msg;
13808 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13809 notify_msg.type = CAMERA3_MSG_ERROR;
13810 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013811 notify_msg.message.error.error_stream = info.stream;
13812 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013813 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013814
13815 camera3_stream_buffer_t buffer = {};
13816 buffer.acquire_fence = -1;
13817 buffer.release_fence = -1;
13818 buffer.buffer = info.buffer;
13819 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13820 buffer.stream = info.stream;
13821 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013822 }
13823
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013824 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13825 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13826 pendingBuffer->frame_number > pendingRequest->frame_number) {
13827 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013828 camera3_notify_msg_t notify_msg;
13829 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13830 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013831 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13832 notify_msg.message.error.error_stream = nullptr;
13833 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013834 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013835
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013836 if (pendingRequest->input_buffer != nullptr) {
13837 camera3_capture_result result = {};
13838 result.frame_number = pendingRequest->frame_number;
13839 result.result = nullptr;
13840 result.input_buffer = pendingRequest->input_buffer;
13841 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013842 }
13843
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013844 mShutterDispatcher.clear(pendingRequest->frame_number);
13845 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13846 } else {
13847 // If both buffers and result metadata weren't sent yet, notify about a request error
13848 // and return buffers with error.
13849 for (auto &info : pendingBuffer->mPendingBufferList) {
13850 camera3_notify_msg_t notify_msg;
13851 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13852 notify_msg.type = CAMERA3_MSG_ERROR;
13853 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13854 notify_msg.message.error.error_stream = info.stream;
13855 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13856 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013857
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013858 camera3_stream_buffer_t buffer = {};
13859 buffer.acquire_fence = -1;
13860 buffer.release_fence = -1;
13861 buffer.buffer = info.buffer;
13862 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13863 buffer.stream = info.stream;
13864 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13865 }
13866
13867 if (pendingRequest->input_buffer != nullptr) {
13868 camera3_capture_result result = {};
13869 result.frame_number = pendingRequest->frame_number;
13870 result.result = nullptr;
13871 result.input_buffer = pendingRequest->input_buffer;
13872 orchestrateResult(&result);
13873 }
13874
13875 mShutterDispatcher.clear(pendingRequest->frame_number);
13876 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13877 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013878 }
13879 }
13880
13881 /* Reset pending frame Drop list and requests list */
13882 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013883 mShutterDispatcher.clear();
13884 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070013885 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013886 LOGH("Cleared all the pending buffers ");
13887
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013888 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013889}
13890
13891bool QCamera3HardwareInterface::isOnEncoder(
13892 const cam_dimension_t max_viewfinder_size,
13893 uint32_t width, uint32_t height)
13894{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013895 return ((width > (uint32_t)max_viewfinder_size.width) ||
13896 (height > (uint32_t)max_viewfinder_size.height) ||
13897 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13898 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013899}
13900
13901/*===========================================================================
13902 * FUNCTION : setBundleInfo
13903 *
13904 * DESCRIPTION: Set bundle info for all streams that are bundle.
13905 *
13906 * PARAMETERS : None
13907 *
13908 * RETURN : NO_ERROR on success
13909 * Error codes on failure
13910 *==========================================================================*/
13911int32_t QCamera3HardwareInterface::setBundleInfo()
13912{
13913 int32_t rc = NO_ERROR;
13914
13915 if (mChannelHandle) {
13916 cam_bundle_config_t bundleInfo;
13917 memset(&bundleInfo, 0, sizeof(bundleInfo));
13918 rc = mCameraHandle->ops->get_bundle_info(
13919 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13920 if (rc != NO_ERROR) {
13921 LOGE("get_bundle_info failed");
13922 return rc;
13923 }
13924 if (mAnalysisChannel) {
13925 mAnalysisChannel->setBundleInfo(bundleInfo);
13926 }
13927 if (mSupportChannel) {
13928 mSupportChannel->setBundleInfo(bundleInfo);
13929 }
13930 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13931 it != mStreamInfo.end(); it++) {
13932 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13933 channel->setBundleInfo(bundleInfo);
13934 }
13935 if (mRawDumpChannel) {
13936 mRawDumpChannel->setBundleInfo(bundleInfo);
13937 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013938 if (mHdrPlusRawSrcChannel) {
13939 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13940 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013941 }
13942
13943 return rc;
13944}
13945
13946/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013947 * FUNCTION : setInstantAEC
13948 *
13949 * DESCRIPTION: Set Instant AEC related params.
13950 *
13951 * PARAMETERS :
13952 * @meta: CameraMetadata reference
13953 *
13954 * RETURN : NO_ERROR on success
13955 * Error codes on failure
13956 *==========================================================================*/
13957int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13958{
13959 int32_t rc = NO_ERROR;
13960 uint8_t val = 0;
13961 char prop[PROPERTY_VALUE_MAX];
13962
13963 // First try to configure instant AEC from framework metadata
13964 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13965 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13966 }
13967
13968 // If framework did not set this value, try to read from set prop.
13969 if (val == 0) {
13970 memset(prop, 0, sizeof(prop));
13971 property_get("persist.camera.instant.aec", prop, "0");
13972 val = (uint8_t)atoi(prop);
13973 }
13974
13975 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13976 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13977 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13978 mInstantAEC = val;
13979 mInstantAECSettledFrameNumber = 0;
13980 mInstantAecFrameIdxCount = 0;
13981 LOGH("instantAEC value set %d",val);
13982 if (mInstantAEC) {
13983 memset(prop, 0, sizeof(prop));
13984 property_get("persist.camera.ae.instant.bound", prop, "10");
13985 int32_t aec_frame_skip_cnt = atoi(prop);
13986 if (aec_frame_skip_cnt >= 0) {
13987 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13988 } else {
13989 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13990 rc = BAD_VALUE;
13991 }
13992 }
13993 } else {
13994 LOGE("Bad instant aec value set %d", val);
13995 rc = BAD_VALUE;
13996 }
13997 return rc;
13998}
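
// Usage sketch (illustrative): instant AEC is taken from the QCAMERA3_INSTANT_AEC_MODE
// vendor tag when present, otherwise from persist.camera.instant.aec; once enabled,
// persist.camera.ae.instant.bound (default 10) bounds how many frames are skipped
// for display while AEC settles (mAecSkipDisplayFrameBound).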
13999
14000/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014001 * FUNCTION : get_num_overall_buffers
14002 *
14003 * DESCRIPTION: Estimate number of pending buffers across all requests.
14004 *
14005 * PARAMETERS : None
14006 *
14007 * RETURN : Number of overall pending buffers
14008 *
14009 *==========================================================================*/
14010uint32_t PendingBuffersMap::get_num_overall_buffers()
14011{
14012 uint32_t sum_buffers = 0;
14013 for (auto &req : mPendingBuffersInRequest) {
14014 sum_buffers += req.mPendingBufferList.size();
14015 }
14016 return sum_buffers;
14017}
14018
14019/*===========================================================================
14020 * FUNCTION : removeBuf
14021 *
14022 * DESCRIPTION: Remove a matching buffer from tracker.
14023 *
14024 * PARAMETERS : @buffer: buffer handle to remove from the tracker
14025 *
14026 * RETURN : None
14027 *
14028 *==========================================================================*/
14029void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14030{
14031 bool buffer_found = false;
14032 for (auto req = mPendingBuffersInRequest.begin();
14033 req != mPendingBuffersInRequest.end(); req++) {
14034 for (auto k = req->mPendingBufferList.begin();
14035 k != req->mPendingBufferList.end(); k++ ) {
14036 if (k->buffer == buffer) {
14037 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14038 req->frame_number, buffer);
14039 k = req->mPendingBufferList.erase(k);
14040 if (req->mPendingBufferList.empty()) {
14041 // Remove this request from Map
14042 req = mPendingBuffersInRequest.erase(req);
14043 }
14044 buffer_found = true;
14045 break;
14046 }
14047 }
14048 if (buffer_found) {
14049 break;
14050 }
14051 }
14052 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14053 get_num_overall_buffers());
14054}
14055
14056/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014057 * FUNCTION : getBufErrStatus
14058 *
14059 * DESCRIPTION: get buffer error status
14060 *
14061 * PARAMETERS : @buffer: buffer handle
14062 *
14063 * RETURN : Error status
14064 *
14065 *==========================================================================*/
14066int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14067{
14068 for (auto& req : mPendingBuffersInRequest) {
14069 for (auto& k : req.mPendingBufferList) {
14070 if (k.buffer == buffer)
14071 return k.bufStatus;
14072 }
14073 }
14074 return CAMERA3_BUFFER_STATUS_OK;
14075}
14076
14077/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014078 * FUNCTION : setPAAFSupport
14079 *
14080 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14081 * feature mask according to stream type and filter
14082 * arrangement
14083 *
14084 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14085 * @stream_type: stream type
14086 * @filter_arrangement: filter arrangement
14087 *
14088 * RETURN : None
14089 *==========================================================================*/
14090void QCamera3HardwareInterface::setPAAFSupport(
14091 cam_feature_mask_t& feature_mask,
14092 cam_stream_type_t stream_type,
14093 cam_color_filter_arrangement_t filter_arrangement)
14094{
Thierry Strudel3d639192016-09-09 11:52:26 -070014095 switch (filter_arrangement) {
14096 case CAM_FILTER_ARRANGEMENT_RGGB:
14097 case CAM_FILTER_ARRANGEMENT_GRBG:
14098 case CAM_FILTER_ARRANGEMENT_GBRG:
14099 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014100 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14101 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014102 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014103 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14104 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014105 }
14106 break;
14107 case CAM_FILTER_ARRANGEMENT_Y:
14108 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14109 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14110 }
14111 break;
14112 default:
14113 break;
14114 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014115 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14116 feature_mask, stream_type, filter_arrangement);
14117
14118
Thierry Strudel3d639192016-09-09 11:52:26 -070014119}
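
// Example (illustrative): for a Bayer sensor (RGGB/GRBG/GBRG/BGGR), preview,
// analysis and video streams get CAM_QCOM_FEATURE_PAAF added to their feature mask
// unless CAM_QTI_FEATURE_PPEISCORE is already set; for a mono (Y) filter
// arrangement only the analysis stream gets PAAF.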
14120
14121/*===========================================================================
14122* FUNCTION : getSensorMountAngle
14123*
14124* DESCRIPTION: Retrieve sensor mount angle
14125*
14126* PARAMETERS : None
14127*
14128* RETURN : sensor mount angle in uint32_t
14129*==========================================================================*/
14130uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14131{
14132 return gCamCapability[mCameraId]->sensor_mount_angle;
14133}
14134
14135/*===========================================================================
14136* FUNCTION : getRelatedCalibrationData
14137*
14138* DESCRIPTION: Retrieve related system calibration data
14139*
14140* PARAMETERS : None
14141*
14142* RETURN : Pointer of related system calibration data
14143*==========================================================================*/
14144const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14145{
14146 return (const cam_related_system_calibration_data_t *)
14147 &(gCamCapability[mCameraId]->related_cam_calibration);
14148}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014149
14150/*===========================================================================
14151 * FUNCTION : is60HzZone
14152 *
14153 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14154 *
14155 * PARAMETERS : None
14156 *
14157 * RETURN : True if in 60Hz zone, False otherwise
14158 *==========================================================================*/
14159bool QCamera3HardwareInterface::is60HzZone()
14160{
14161 time_t t = time(NULL);
14162 struct tm lt;
14163
14164 struct tm* r = localtime_r(&t, &lt);
14165
14166 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14167 return true;
14168 else
14169 return false;
14170}
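
// Worked example (illustrative): the heuristic keys off the local UTC offset. An
// offset of -8h (e.g. US Pacific) satisfies tm_gmtoff <= -2h and is treated as a
// 60Hz zone; +1h (central Europe) falls inside (-2h, +8h) and is treated as 50Hz.
// If local time cannot be read, 60Hz is assumed.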
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014171
14172/*===========================================================================
14173 * FUNCTION : adjustBlackLevelForCFA
14174 *
14175 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14176 * of bayer CFA (Color Filter Array).
14177 *
14178 * PARAMETERS : @input: black level pattern in the order of RGGB
14179 * @output: black level pattern in the order of CFA
14180 * @color_arrangement: CFA color arrangement
14181 *
14182 * RETURN : None
14183 *==========================================================================*/
14184template<typename T>
14185void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14186 T input[BLACK_LEVEL_PATTERN_CNT],
14187 T output[BLACK_LEVEL_PATTERN_CNT],
14188 cam_color_filter_arrangement_t color_arrangement)
14189{
14190 switch (color_arrangement) {
14191 case CAM_FILTER_ARRANGEMENT_GRBG:
14192 output[0] = input[1];
14193 output[1] = input[0];
14194 output[2] = input[3];
14195 output[3] = input[2];
14196 break;
14197 case CAM_FILTER_ARRANGEMENT_GBRG:
14198 output[0] = input[2];
14199 output[1] = input[3];
14200 output[2] = input[0];
14201 output[3] = input[1];
14202 break;
14203 case CAM_FILTER_ARRANGEMENT_BGGR:
14204 output[0] = input[3];
14205 output[1] = input[2];
14206 output[2] = input[1];
14207 output[3] = input[0];
14208 break;
14209 case CAM_FILTER_ARRANGEMENT_RGGB:
14210 output[0] = input[0];
14211 output[1] = input[1];
14212 output[2] = input[2];
14213 output[3] = input[3];
14214 break;
14215 default:
14216 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14217 break;
14218 }
14219}
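
// Worked example (illustrative): with input = {R, Gr, Gb, B} (RGGB order) and
// color_arrangement = CAM_FILTER_ARRANGEMENT_GRBG, the output becomes
// {Gr, R, B, Gb}: the same four black-level values re-ordered to match the
// sensor's CFA layout.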
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014220
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014221void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14222 CameraMetadata &resultMetadata,
14223 std::shared_ptr<metadata_buffer_t> settings)
14224{
14225 if (settings == nullptr) {
14226 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14227 return;
14228 }
14229
14230 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14231 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14232 }
14233
14234 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14235 String8 str((const char *)gps_methods);
14236 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14237 }
14238
14239 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14240 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14241 }
14242
14243 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14244 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14245 }
14246
14247 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14248 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14249 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14250 }
14251
14252 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14253 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14254 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14255 }
14256
14257 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14258 int32_t fwk_thumb_size[2];
14259 fwk_thumb_size[0] = thumb_size->width;
14260 fwk_thumb_size[1] = thumb_size->height;
14261 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14262 }
14263
14264 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14265 uint8_t fwk_intent = intent[0];
14266 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14267 }
14268}
14269
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014270bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14271 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14272 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014273{
14274 if (hdrPlusRequest == nullptr) return false;
14275
14276 // Check noise reduction mode is high quality.
14277 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14278 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14279 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014280 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14281 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014282 return false;
14283 }
14284
14285 // Check edge mode is high quality.
14286 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14287 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14288 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14289 return false;
14290 }
14291
14292 if (request.num_output_buffers != 1 ||
14293 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14294 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014295 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14296 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14297 request.output_buffers[i].stream->width,
14298 request.output_buffers[i].stream->height,
14299 request.output_buffers[i].stream->format);
14300 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014301 return false;
14302 }
14303
14304 // Get a YUV buffer from pic channel.
14305 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14306 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14307 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14308 if (res != OK) {
14309 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14310 __FUNCTION__, strerror(-res), res);
14311 return false;
14312 }
14313
14314 pbcamera::StreamBuffer buffer;
14315 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014316 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014317 buffer.data = yuvBuffer->buffer;
14318 buffer.dataSize = yuvBuffer->frame_len;
14319
14320 pbcamera::CaptureRequest pbRequest;
14321 pbRequest.id = request.frame_number;
14322 pbRequest.outputBuffers.push_back(buffer);
14323
14324 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014325 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014326 if (res != OK) {
14327 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14328 strerror(-res), res);
14329 return false;
14330 }
14331
14332 hdrPlusRequest->yuvBuffer = yuvBuffer;
14333 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14334
14335 return true;
14336}
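
// Lifecycle sketch (illustrative): an HDR+ capture is only attempted when the
// request asks for high-quality noise reduction and edge enhancement and has a
// single JPEG (BLOB) output. The HAL borrows a YUV buffer from the pic channel,
// submits a pbcamera::CaptureRequest to the HDR+ service, and the result arrives
// asynchronously in onCaptureResult() below, where the YUV buffer is handed back
// to the pic channel for JPEG encoding.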
14337
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014338status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14339{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014340 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14341 return OK;
14342 }
14343
14344 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14345 if (res != OK) {
14346 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14347 strerror(-res), res);
14348 return res;
14349 }
14350 gHdrPlusClientOpening = true;
14351
14352 return OK;
14353}
14354
Chien-Yu Chenee335912017-02-09 17:53:20 -080014355status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14356{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014357 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014358
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014359 // Check if gHdrPlusClient is opened or being opened.
14360 if (gHdrPlusClient == nullptr) {
14361 if (gHdrPlusClientOpening) {
14362 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14363 return OK;
14364 }
14365
14366 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014367 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014368 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14369 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014370 return res;
14371 }
14372
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014373 // When opening HDR+ client completes, HDR+ mode will be enabled.
14374 return OK;
14375
Chien-Yu Chenee335912017-02-09 17:53:20 -080014376 }
14377
14378 // Configure stream for HDR+.
14379 res = configureHdrPlusStreamsLocked();
14380 if (res != OK) {
14381 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014382 return res;
14383 }
14384
14385 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14386 res = gHdrPlusClient->setZslHdrPlusMode(true);
14387 if (res != OK) {
14388 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014389 return res;
14390 }
14391
14392 mHdrPlusModeEnabled = true;
14393 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14394
14395 return OK;
14396}
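
// Flow sketch (illustrative): enabling HDR+ either reuses an already-open client or
// kicks off openHdrPlusClientAsyncLocked(); once onOpened() delivers the client,
// static metadata is pushed, enableHdrPlusModeLocked() runs again, streams are
// configured via configureHdrPlusStreamsLocked(), and ZSL HDR+ capture is turned on
// with setZslHdrPlusMode(true).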
14397
14398void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14399{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014400 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014401 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014402 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14403 if (res != OK) {
14404 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14405 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014406
14407 // Close HDR+ client so Easel can enter low power mode.
14408 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14409 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014410 }
14411
14412 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014413 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014414 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14415}
14416
14417status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014418{
14419 pbcamera::InputConfiguration inputConfig;
14420 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14421 status_t res = OK;
14422
14423 // Configure HDR+ client streams.
14424 // Get input config.
14425 if (mHdrPlusRawSrcChannel) {
14426 // HDR+ input buffers will be provided by HAL.
14427 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14428 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14429 if (res != OK) {
14430 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14431 __FUNCTION__, strerror(-res), res);
14432 return res;
14433 }
14434
14435 inputConfig.isSensorInput = false;
14436 } else {
14437 // Sensor MIPI will send data to Easel.
14438 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014439 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014440 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14441 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14442 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14443 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14444 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14445 if (mSensorModeInfo.num_raw_bits != 10) {
14446 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14447 mSensorModeInfo.num_raw_bits);
14448 return BAD_VALUE;
14449 }
14450
14451 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014452 }
14453
14454 // Get output configurations.
14455 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014456 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014457
14458 // Easel may need to output YUV output buffers if mPictureChannel was created.
14459 pbcamera::StreamConfiguration yuvOutputConfig;
14460 if (mPictureChannel != nullptr) {
14461 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14462 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14463 if (res != OK) {
14464 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14465 __FUNCTION__, strerror(-res), res);
14466
14467 return res;
14468 }
14469
14470 outputStreamConfigs.push_back(yuvOutputConfig);
14471 }
14472
14473 // TODO: consider other channels for YUV output buffers.
14474
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014475 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014476 if (res != OK) {
14477 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14478 strerror(-res), res);
14479 return res;
14480 }
14481
14482 return OK;
14483}
14484
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014485void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14486{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014487 if (client == nullptr) {
14488 ALOGE("%s: Opened client is null.", __FUNCTION__);
14489 return;
14490 }
14491
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014492 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014493 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14494
14495 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014496 if (!gHdrPlusClientOpening) {
14497 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14498 return;
14499 }
14500
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014501 gHdrPlusClient = std::move(client);
14502 gHdrPlusClientOpening = false;
14503
14504 // Set static metadata.
14505 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14506 if (res != OK) {
14507 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14508 __FUNCTION__, strerror(-res), res);
14509 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14510 gHdrPlusClient = nullptr;
14511 return;
14512 }
14513
14514 // Enable HDR+ mode.
14515 res = enableHdrPlusModeLocked();
14516 if (res != OK) {
14517 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14518 }
14519}
14520
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014521void QCamera3HardwareInterface::onOpenFailed(status_t err)
14522{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014523 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14524 Mutex::Autolock l(gHdrPlusClientLock);
14525 gHdrPlusClientOpening = false;
14526}
14527
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014528void QCamera3HardwareInterface::onFatalError()
14529{
14530 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14531
14532 // Set HAL state to error.
14533 pthread_mutex_lock(&mMutex);
14534 mState = ERROR;
14535 pthread_mutex_unlock(&mMutex);
14536
14537 handleCameraDeviceError();
14538}
14539
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Cannot find a pending HDR+ request for request id %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump the YUV buffer to a PPM file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert the updated result metadata to HAL metadata and return the YUV buffer for JPEG
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to the pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).",
                    __FUNCTION__, strerror(-res), res);
        }

        // Find the sensor timestamp in the result metadata and mark the shutter ready.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to the framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

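// HDR+ client callback: invoked when an HDR+ capture request failed. Returns the YUV buffer
// of the pending HDR+ request to the pic channel, reports buffer errors for all pending
// buffers of the failed frame to the framework, and removes the pending request.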
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Cannot find a pending HDR+ request for request id %d.", __FUNCTION__,
                    failedResult->requestId);
        } else {
            // Return the buffer to the pic channel.
            QCamera3PicChannel *picChannel = (QCamera3PicChannel*)
                    pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out an error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out the result with buffer errors.
        orchestrateResult(&result);

        // Remove the pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove the pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

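// ShutterDispatcher keeps shutter notifications in frame-number order: a shutter is sent to
// the framework only after the shutters of all earlier expected frames have been sent.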
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

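// Registers a frame number for which a shutter notification is expected.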
void ShutterDispatcher::expectShutter(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.emplace(frameNumber, Shutter());
}

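// Marks the shutter of a frame ready and sends out all consecutive ready shutters in
// frame-number order.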
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Make this frame's shutter ready.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        // The shutter was already sent.
        return;
    }

    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that is not ready yet.
    shutter = mShutters.begin();
    while (shutter != mShutters.end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = mShutters.erase(shutter);
    }
}

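// Removes the expected shutter for a single frame number without sending it.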
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
}

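// Clears all expected shutters, logging an error for each stale shutter that was never sent.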
void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }
    mShutters.clear();
}

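// OutputBufferDispatcher keeps output buffers in frame-number order per stream: a buffer is
// returned to the framework only after the buffers of all earlier expected frames on the same
// stream have been returned.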
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

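// Sets up an empty "frame-number -> buffer" map for each configured stream.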
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

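// Registers a frame number for which a buffer is expected on the given stream.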
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

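// Marks a frame's buffer ready for its stream and sends out all consecutive ready buffers of
// that stream in frame-number order.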
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that is not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result containing this ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

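// Clears all pending buffers, logging an error for each stale buffer that was never returned.
// If clearConfiguredStreams is true, the per-stream maps are removed as well.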
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera