/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if the requested FPS is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
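// For example, METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number of
// entries in the EFFECT_MODES_MAP table defined below (array size / element size).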

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * The order of this list also matters: when mapping from HAL to Android, the table is
 * traversed from lower to higher index, so for HAL values that map to multiple
 * Android values, the first match found is selected.
 */
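// For example, ILLUMINANT1_D50, ILLUMINANT1_DAYLIGHT and ILLUMINANT1_FINE_WEATHER all map
// to CAM_AWB_D50 below, so a HAL value of CAM_AWB_D50 is reported back to Android as
// ILLUMINANT1_D50 (the first matching entry).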
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
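// register_stream_buffers and get_metadata_vendor_tag_ops are intentionally NULL: both
// entry points are deprecated for camera device API v3.2 and later, and this HAL reports
// CAMERA_DEVICE_API_VERSION_3_3/3_4 in the constructor below.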

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
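// 0xDEADBEEF doubles as the "invalid session id" sentinel; closeCamera() resets a
// camera's entry back to this value once its session ends.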

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD: verify whether this hardcoding is still needed (check whether mctl fills this to 3)
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    // Notify display HAL that a camera session is active.
    // Avoid making this call during bootup: camera service may open/close cameras while it
    // initializes, and display service internally waits for camera service to initialize
    // before serving this display API, which would deadlock. Boot-time camera open/close
    // calls only fetch capabilities, so this display bandwidth optimization is not needed there.
    // Use the "service.bootanim.exit" property to detect boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that the requested stream configuration uses only advertised sizes
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size, but it
                 * is not clear from the spec whether the framework will always
                 * follow that. We also have logic to override to full array
                 * size, so keep the check lenient for now.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}
1377
1378/*===========================================================================
1379 * FUNCTION : validateUsageFlagsForEis
1380 *
1381 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1382 *
1383 * PARAMETERS :
1384 * @stream_list : streams to be configured
1385 *
1386 * RETURN :
1387 * NO_ERROR if the usage flags are supported
1388 * error code if usage flags are not supported
1389 *
1390 *==========================================================================*/
1391int QCamera3HardwareInterface::validateUsageFlagsForEis(
1392 const camera3_stream_configuration_t* streamList)
1393{
1394 for (size_t j = 0; j < streamList->num_streams; j++) {
1395 const camera3_stream_t *newStream = streamList->streams[j];
1396
1397 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1398 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1399
1400        // Because EIS is "hard-coded" for certain use cases, and the current
1401 // implementation doesn't support shared preview and video on the same
1402 // stream, return failure if EIS is forced on.
1403 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1404 LOGE("Combined video and preview usage flag is not supported due to EIS");
1405 return -EINVAL;
1406 }
1407 }
1408 return NO_ERROR;
1409}
1410
Thierry Strudel3d639192016-09-09 11:52:26 -07001411/*==============================================================================
1412 * FUNCTION : isSupportChannelNeeded
1413 *
1414 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1415 *
1416 * PARAMETERS :
1417 * @stream_list : streams to be configured
1418 * @stream_config_info : the config info for streams to be configured
1419 *
1420 * RETURN     : Boolean true/false decision
1421 *
1422 *==========================================================================*/
1423bool QCamera3HardwareInterface::isSupportChannelNeeded(
1424 camera3_stream_configuration_t *streamList,
1425 cam_stream_size_info_t stream_config_info)
1426{
1427 uint32_t i;
1428 bool pprocRequested = false;
1429    /* Check for conditions where the PProc pipeline does not have any streams */
1430 for (i = 0; i < stream_config_info.num_streams; i++) {
1431 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1432 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1433 pprocRequested = true;
1434 break;
1435 }
1436 }
1437
1438    if (pprocRequested == false)
1439 return true;
1440
1441 /* Dummy stream needed if only raw or jpeg streams present */
1442 for (i = 0; i < streamList->num_streams; i++) {
1443 switch(streamList->streams[i]->format) {
1444 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1445 case HAL_PIXEL_FORMAT_RAW10:
1446 case HAL_PIXEL_FORMAT_RAW16:
1447 case HAL_PIXEL_FORMAT_BLOB:
1448 break;
1449 default:
1450 return false;
1451 }
1452 }
1453 return true;
1454}
1455
1456/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001457 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001458 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001459 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001460 *
1461 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001462 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001463 *
1464 * RETURN : int32_t type of status
1465 * NO_ERROR -- success
1466 *              non-zero failure code
1467 *
1468 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001470{
1471 int32_t rc = NO_ERROR;
1472
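    /* Find the largest width and height across all configured streams and
     * report it via CAM_INTF_PARM_MAX_DIMENSION, so that the sensor mode
     * query below reflects the current stream configuration. */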
1473 cam_dimension_t max_dim = {0, 0};
1474 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1475 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1476 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1477 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1478 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1479 }
1480
1481 clear_metadata_buffer(mParameters);
1482
1483 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1484 max_dim);
1485 if (rc != NO_ERROR) {
1486 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1487 return rc;
1488 }
1489
1490 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1491 if (rc != NO_ERROR) {
1492 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1493 return rc;
1494 }
1495
1496 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001497 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001498
1499 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1500 mParameters);
1501 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001502 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001503 return rc;
1504 }
1505
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001506 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001507 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1508 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1509 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1510 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1511 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001512
1513 return rc;
1514}
1515
1516/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 * FUNCTION : addToPPFeatureMask
1518 *
1519 * DESCRIPTION: add additional features to pp feature mask based on
1520 * stream type and usecase
1521 *
1522 * PARAMETERS :
1523 * @stream_format : stream type for feature mask
1524 * @stream_idx : stream idx within postprocess_mask list to change
1525 *
1526 * RETURN     : None
1527 *
1528 *==========================================================================*/
1529void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1530 uint32_t stream_idx)
1531{
1532 char feature_mask_value[PROPERTY_VALUE_MAX];
1533 cam_feature_mask_t feature_mask;
1534 int args_converted;
1535 int property_len;
1536
1537 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001538#ifdef _LE_CAMERA_
1539 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1540 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1541 property_len = property_get("persist.camera.hal3.feature",
1542 feature_mask_value, swtnr_feature_mask_value);
1543#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001544 property_len = property_get("persist.camera.hal3.feature",
1545 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001546#endif
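    /* The property value is accepted either as hex with a "0x" prefix
     * (parsed with %llx) or as plain decimal (parsed with %lld); a value
     * that fails to parse resets the feature mask to 0. */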
Thierry Strudel3d639192016-09-09 11:52:26 -07001547 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1548 (feature_mask_value[1] == 'x')) {
1549 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1550 } else {
1551 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1552 }
1553 if (1 != args_converted) {
1554 feature_mask = 0;
1555 LOGE("Wrong feature mask %s", feature_mask_value);
1556 return;
1557 }
1558
1559 switch (stream_format) {
1560 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1561 /* Add LLVD to pp feature mask only if video hint is enabled */
1562 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1563 mStreamConfigInfo.postprocess_mask[stream_idx]
1564 |= CAM_QTI_FEATURE_SW_TNR;
1565 LOGH("Added SW TNR to pp feature mask");
1566 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1567 mStreamConfigInfo.postprocess_mask[stream_idx]
1568 |= CAM_QCOM_FEATURE_LLVD;
1569 LOGH("Added LLVD SeeMore to pp feature mask");
1570 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001571 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1572 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1573 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1574 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001575 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1576 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1577 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1578 CAM_QTI_FEATURE_BINNING_CORRECTION;
1579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001580 break;
1581 }
1582 default:
1583 break;
1584 }
1585 LOGD("PP feature mask %llx",
1586 mStreamConfigInfo.postprocess_mask[stream_idx]);
1587}
1588
1589/*==============================================================================
1590 * FUNCTION : updateFpsInPreviewBuffer
1591 *
1592 * DESCRIPTION: update FPS information in preview buffer.
1593 *
1594 * PARAMETERS :
1595 * @metadata : pointer to metadata buffer
1596 * @frame_number: frame_number to look for in pending buffer list
1597 *
1598 * RETURN : None
1599 *
1600 *==========================================================================*/
1601void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1602 uint32_t frame_number)
1603{
1604 // Mark all pending buffers for this particular request
1605 // with corresponding framerate information
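    // The max fps of the request's FPS range is written into the gralloc
    // private handle (UPDATE_REFRESH_RATE) so the display side can use it
    // as a refresh-rate hint.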
1606 for (List<PendingBuffersInRequest>::iterator req =
1607 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1608 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1609 for(List<PendingBufferInfo>::iterator j =
1610 req->mPendingBufferList.begin();
1611 j != req->mPendingBufferList.end(); j++) {
1612 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1613 if ((req->frame_number == frame_number) &&
1614 (channel->getStreamTypeMask() &
1615 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1616 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1617 CAM_INTF_PARM_FPS_RANGE, metadata) {
1618 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1619 struct private_handle_t *priv_handle =
1620 (struct private_handle_t *)(*(j->buffer));
1621 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1622 }
1623 }
1624 }
1625 }
1626}
1627
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001628/*==============================================================================
1629 * FUNCTION : updateTimeStampInPendingBuffers
1630 *
1631 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1632 * of a frame number
1633 *
1634 * PARAMETERS :
1635 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1636 * @timestamp : timestamp to be set
1637 *
1638 * RETURN : None
1639 *
1640 *==========================================================================*/
1641void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1642 uint32_t frameNumber, nsecs_t timestamp)
1643{
1644 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1645 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1646 if (req->frame_number != frameNumber)
1647 continue;
1648
1649 for (auto k = req->mPendingBufferList.begin();
1650 k != req->mPendingBufferList.end(); k++ ) {
1651 struct private_handle_t *priv_handle =
1652 (struct private_handle_t *) (*(k->buffer));
1653 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1654 }
1655 }
1656 return;
1657}
1658
Thierry Strudel3d639192016-09-09 11:52:26 -07001659/*===========================================================================
1660 * FUNCTION : configureStreams
1661 *
1662 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1663 * and output streams.
1664 *
1665 * PARAMETERS :
1666 * @stream_list : streams to be configured
1667 *
1668 * RETURN :
1669 *
1670 *==========================================================================*/
1671int QCamera3HardwareInterface::configureStreams(
1672 camera3_stream_configuration_t *streamList)
1673{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001674 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001675 int rc = 0;
1676
1677 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001678 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001679 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001680 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001681
1682 return rc;
1683}
1684
1685/*===========================================================================
1686 * FUNCTION : configureStreamsPerfLocked
1687 *
1688 * DESCRIPTION: configureStreams while perfLock is held.
1689 *
1690 * PARAMETERS :
1691 * @stream_list : streams to be configured
1692 *
1693 * RETURN : int32_t type of status
1694 * NO_ERROR -- success
1695 *              non-zero failure code
1696 *==========================================================================*/
1697int QCamera3HardwareInterface::configureStreamsPerfLocked(
1698 camera3_stream_configuration_t *streamList)
1699{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001700 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001701 int rc = 0;
1702
1703 // Sanity check stream_list
1704 if (streamList == NULL) {
1705 LOGE("NULL stream configuration");
1706 return BAD_VALUE;
1707 }
1708 if (streamList->streams == NULL) {
1709 LOGE("NULL stream list");
1710 return BAD_VALUE;
1711 }
1712
1713 if (streamList->num_streams < 1) {
1714 LOGE("Bad number of streams requested: %d",
1715 streamList->num_streams);
1716 return BAD_VALUE;
1717 }
1718
1719 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1720 LOGE("Maximum number of streams %d exceeded: %d",
1721 MAX_NUM_STREAMS, streamList->num_streams);
1722 return BAD_VALUE;
1723 }
1724
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001725 rc = validateUsageFlags(streamList);
1726 if (rc != NO_ERROR) {
1727 return rc;
1728 }
1729
Thierry Strudel3d639192016-09-09 11:52:26 -07001730 mOpMode = streamList->operation_mode;
1731 LOGD("mOpMode: %d", mOpMode);
1732
1733    /* First invalidate all the streams in mStreamInfo;
1734     * if they appear again in the new configuration, they will be re-validated */
1735 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1736 it != mStreamInfo.end(); it++) {
1737 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1738 if (channel) {
1739 channel->stop();
1740 }
1741 (*it)->status = INVALID;
1742 }
1743
1744 if (mRawDumpChannel) {
1745 mRawDumpChannel->stop();
1746 delete mRawDumpChannel;
1747 mRawDumpChannel = NULL;
1748 }
1749
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001750 if (mHdrPlusRawSrcChannel) {
1751 mHdrPlusRawSrcChannel->stop();
1752 delete mHdrPlusRawSrcChannel;
1753 mHdrPlusRawSrcChannel = NULL;
1754 }
1755
Thierry Strudel3d639192016-09-09 11:52:26 -07001756 if (mSupportChannel)
1757 mSupportChannel->stop();
1758
1759 if (mAnalysisChannel) {
1760 mAnalysisChannel->stop();
1761 }
1762 if (mMetadataChannel) {
1763        /* If mStreamInfo is not empty, a metadata stream exists */
1764 mMetadataChannel->stop();
1765 }
1766 if (mChannelHandle) {
1767 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1768 mChannelHandle);
1769 LOGD("stopping channel %d", mChannelHandle);
1770 }
1771
1772 pthread_mutex_lock(&mMutex);
1773
1774 // Check state
1775 switch (mState) {
1776 case INITIALIZED:
1777 case CONFIGURED:
1778 case STARTED:
1779 /* valid state */
1780 break;
1781 default:
1782 LOGE("Invalid state %d", mState);
1783 pthread_mutex_unlock(&mMutex);
1784 return -ENODEV;
1785 }
1786
1787 /* Check whether we have video stream */
1788 m_bIs4KVideo = false;
1789 m_bIsVideo = false;
1790 m_bEisSupportedSize = false;
1791 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001792 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001793 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001794 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001795 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001796 uint32_t videoWidth = 0U;
1797 uint32_t videoHeight = 0U;
1798 size_t rawStreamCnt = 0;
1799 size_t stallStreamCnt = 0;
1800 size_t processedStreamCnt = 0;
1801 // Number of streams on ISP encoder path
1802 size_t numStreamsOnEncoder = 0;
1803 size_t numYuv888OnEncoder = 0;
1804 bool bYuv888OverrideJpeg = false;
1805 cam_dimension_t largeYuv888Size = {0, 0};
1806 cam_dimension_t maxViewfinderSize = {0, 0};
1807 bool bJpegExceeds4K = false;
1808 bool bJpegOnEncoder = false;
1809 bool bUseCommonFeatureMask = false;
1810 cam_feature_mask_t commonFeatureMask = 0;
1811 bool bSmallJpegSize = false;
1812 uint32_t width_ratio;
1813 uint32_t height_ratio;
1814 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1815 camera3_stream_t *inputStream = NULL;
1816 bool isJpeg = false;
1817 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001818 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001819 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001820
1821 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1822
1823 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 uint8_t eis_prop_set;
1825 uint32_t maxEisWidth = 0;
1826 uint32_t maxEisHeight = 0;
1827
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001828 // Initialize all instant AEC related variables
1829 mInstantAEC = false;
1830 mResetInstantAEC = false;
1831 mInstantAECSettledFrameNumber = 0;
1832 mAecSkipDisplayFrameBound = 0;
1833 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001834 mCurrFeatureState = 0;
1835 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001836
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1838
1839 size_t count = IS_TYPE_MAX;
1840 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1841 for (size_t i = 0; i < count; i++) {
1842 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001843 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1844 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 break;
1846 }
1847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001848
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001849 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001850 maxEisWidth = MAX_EIS_WIDTH;
1851 maxEisHeight = MAX_EIS_HEIGHT;
1852 }
1853
1854 /* EIS setprop control */
1855 char eis_prop[PROPERTY_VALUE_MAX];
1856 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001857 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001858 eis_prop_set = (uint8_t)atoi(eis_prop);
1859
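    // EIS is enabled only when the persist.camera.eis.enable property allows
    // it, the sensor capabilities report an EIS-capable IS type, and the
    // session is not a constrained high-speed (HFR) configuration.
    // For debugging it can be turned off with:
    //   adb shell setprop persist.camera.eis.enable 0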
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001860 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1862
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001863 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1864 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001865
Thierry Strudel3d639192016-09-09 11:52:26 -07001866 /* stream configurations */
1867 for (size_t i = 0; i < streamList->num_streams; i++) {
1868 camera3_stream_t *newStream = streamList->streams[i];
1869 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1870 "height = %d, rotation = %d, usage = 0x%x",
1871 i, newStream->stream_type, newStream->format,
1872 newStream->width, newStream->height, newStream->rotation,
1873 newStream->usage);
1874 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1875 newStream->stream_type == CAMERA3_STREAM_INPUT){
1876 isZsl = true;
1877 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001878 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1879 IS_USAGE_PREVIEW(newStream->usage)) {
1880 isPreview = true;
1881 }
1882
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1884 inputStream = newStream;
1885 }
1886
Emilian Peev7650c122017-01-19 08:24:33 -08001887 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1888 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 isJpeg = true;
1890 jpegSize.width = newStream->width;
1891 jpegSize.height = newStream->height;
1892 if (newStream->width > VIDEO_4K_WIDTH ||
1893 newStream->height > VIDEO_4K_HEIGHT)
1894 bJpegExceeds4K = true;
1895 }
1896
1897 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1898 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1899 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001900 // In HAL3 we can have multiple different video streams.
1901 // The variables video width and height are used below as
1902 // dimensions of the biggest of them
1903 if (videoWidth < newStream->width ||
1904 videoHeight < newStream->height) {
1905 videoWidth = newStream->width;
1906 videoHeight = newStream->height;
1907 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1909 (VIDEO_4K_HEIGHT <= newStream->height)) {
1910 m_bIs4KVideo = true;
1911 }
1912 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1913 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001914
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 }
1916 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1917 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1918 switch (newStream->format) {
1919 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001920 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1921 depthPresent = true;
1922 break;
1923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001924 stallStreamCnt++;
1925 if (isOnEncoder(maxViewfinderSize, newStream->width,
1926 newStream->height)) {
1927 numStreamsOnEncoder++;
1928 bJpegOnEncoder = true;
1929 }
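                // Flag JPEG sizes that would require more downscaling from the
                // active array than the ISP supports (max_downscale_factor);
                // such streams are later assigned the PP superset feature mask
                // (see the BLOB case in the channel-allocation loop below).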
1930 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1931 newStream->width);
1932 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1933                    newStream->height);
1934 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1935 "FATAL: max_downscale_factor cannot be zero and so assert");
1936 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1937 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1938 LOGH("Setting small jpeg size flag to true");
1939 bSmallJpegSize = true;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_RAW10:
1943 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1944 case HAL_PIXEL_FORMAT_RAW16:
1945 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001946 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1947 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1948 pdStatCount++;
1949 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 break;
1951 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1952 processedStreamCnt++;
1953 if (isOnEncoder(maxViewfinderSize, newStream->width,
1954 newStream->height)) {
1955 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1956 !IS_USAGE_ZSL(newStream->usage)) {
1957 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1958 }
1959 numStreamsOnEncoder++;
1960 }
1961 break;
1962 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1963 processedStreamCnt++;
1964 if (isOnEncoder(maxViewfinderSize, newStream->width,
1965 newStream->height)) {
1966 // If Yuv888 size is not greater than 4K, set feature mask
1967 // to SUPERSET so that it support concurrent request on
1968 // YUV and JPEG.
1969 if (newStream->width <= VIDEO_4K_WIDTH &&
1970 newStream->height <= VIDEO_4K_HEIGHT) {
1971 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1972 }
1973 numStreamsOnEncoder++;
1974 numYuv888OnEncoder++;
1975 largeYuv888Size.width = newStream->width;
1976 largeYuv888Size.height = newStream->height;
1977 }
1978 break;
1979 default:
1980 processedStreamCnt++;
1981 if (isOnEncoder(maxViewfinderSize, newStream->width,
1982 newStream->height)) {
1983 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1984 numStreamsOnEncoder++;
1985 }
1986 break;
1987 }
1988
1989 }
1990 }
1991
1992 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1993 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1994 !m_bIsVideo) {
1995 m_bEisEnable = false;
1996 }
1997
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001998 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
Thierry Strudel54dc9782017-02-15 12:12:10 -08002003 uint8_t forceEnableTnr = 0;
2004 char tnr_prop[PROPERTY_VALUE_MAX];
2005 memset(tnr_prop, 0, sizeof(tnr_prop));
2006 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2007 forceEnableTnr = (uint8_t)atoi(tnr_prop);
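    // debug.camera.tnr.forceenable is a debug override that turns TNR on
    // regardless of the stream-configuration checks below.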
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Logic to enable/disable TNR based on specific config size/etc.*/
2010 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2012 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002013 else if (forceEnableTnr)
2014 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002015
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002016 char videoHdrProp[PROPERTY_VALUE_MAX];
2017 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2018 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2019 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2020
2021 if (hdr_mode_prop == 1 && m_bIsVideo &&
2022 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2023 m_bVideoHdrEnabled = true;
2024 else
2025 m_bVideoHdrEnabled = false;
2026
2027
Thierry Strudel3d639192016-09-09 11:52:26 -07002028 /* Check if num_streams is sane */
2029 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2030 rawStreamCnt > MAX_RAW_STREAMS ||
2031 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2032 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2033 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2034 pthread_mutex_unlock(&mMutex);
2035 return -EINVAL;
2036 }
2037 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002038 if (isZsl && m_bIs4KVideo) {
2039 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002040 pthread_mutex_unlock(&mMutex);
2041 return -EINVAL;
2042 }
2043 /* Check if stream sizes are sane */
2044 if (numStreamsOnEncoder > 2) {
2045 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2046 pthread_mutex_unlock(&mMutex);
2047 return -EINVAL;
2048 } else if (1 < numStreamsOnEncoder){
2049 bUseCommonFeatureMask = true;
2050 LOGH("Multiple streams above max viewfinder size, common mask needed");
2051 }
2052
2053 /* Check if BLOB size is greater than 4k in 4k recording case */
2054 if (m_bIs4KVideo && bJpegExceeds4K) {
2055 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2056 pthread_mutex_unlock(&mMutex);
2057 return -EINVAL;
2058 }
2059
Emilian Peev7650c122017-01-19 08:24:33 -08002060 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2061 depthPresent) {
2062 LOGE("HAL doesn't support depth streams in HFR mode!");
2063 pthread_mutex_unlock(&mMutex);
2064 return -EINVAL;
2065 }
2066
Thierry Strudel3d639192016-09-09 11:52:26 -07002067 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2068 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2069 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2070 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2071 // configurations:
2072 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2073 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2074 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2075 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2076 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2077 __func__);
2078 pthread_mutex_unlock(&mMutex);
2079 return -EINVAL;
2080 }
2081
2082 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2083 // the YUV stream's size is greater or equal to the JPEG size, set common
2084 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2085 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2086 jpegSize.width, jpegSize.height) &&
2087 largeYuv888Size.width > jpegSize.width &&
2088 largeYuv888Size.height > jpegSize.height) {
2089 bYuv888OverrideJpeg = true;
2090 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2091 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2092 }
2093
2094 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2095 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2096 commonFeatureMask);
2097 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2098 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2099
2100 rc = validateStreamDimensions(streamList);
2101 if (rc == NO_ERROR) {
2102 rc = validateStreamRotations(streamList);
2103 }
2104 if (rc != NO_ERROR) {
2105 LOGE("Invalid stream configuration requested!");
2106 pthread_mutex_unlock(&mMutex);
2107 return rc;
2108 }
2109
Emilian Peev0f3c3162017-03-15 12:57:46 +00002110 if (1 < pdStatCount) {
2111 LOGE("HAL doesn't support multiple PD streams");
2112 pthread_mutex_unlock(&mMutex);
2113 return -EINVAL;
2114 }
2115
2116 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2117 (1 == pdStatCount)) {
2118 LOGE("HAL doesn't support PD streams in HFR mode!");
2119 pthread_mutex_unlock(&mMutex);
2120 return -EINVAL;
2121 }
2122
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2124 for (size_t i = 0; i < streamList->num_streams; i++) {
2125 camera3_stream_t *newStream = streamList->streams[i];
2126 LOGH("newStream type = %d, stream format = %d "
2127 "stream size : %d x %d, stream rotation = %d",
2128 newStream->stream_type, newStream->format,
2129 newStream->width, newStream->height, newStream->rotation);
2130 //if the stream is in the mStreamList validate it
2131 bool stream_exists = false;
2132 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2133 it != mStreamInfo.end(); it++) {
2134 if ((*it)->stream == newStream) {
2135 QCamera3ProcessingChannel *channel =
2136 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2137 stream_exists = true;
2138 if (channel)
2139 delete channel;
2140 (*it)->status = VALID;
2141 (*it)->stream->priv = NULL;
2142 (*it)->channel = NULL;
2143 }
2144 }
2145 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2146 //new stream
2147 stream_info_t* stream_info;
2148 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2149 if (!stream_info) {
2150 LOGE("Could not allocate stream info");
2151 rc = -ENOMEM;
2152 pthread_mutex_unlock(&mMutex);
2153 return rc;
2154 }
2155 stream_info->stream = newStream;
2156 stream_info->status = VALID;
2157 stream_info->channel = NULL;
2158 mStreamInfo.push_back(stream_info);
2159 }
2160 /* Covers Opaque ZSL and API1 F/W ZSL */
2161 if (IS_USAGE_ZSL(newStream->usage)
2162 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2163 if (zslStream != NULL) {
2164 LOGE("Multiple input/reprocess streams requested!");
2165 pthread_mutex_unlock(&mMutex);
2166 return BAD_VALUE;
2167 }
2168 zslStream = newStream;
2169 }
2170 /* Covers YUV reprocess */
2171 if (inputStream != NULL) {
2172 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2173 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2174 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2175 && inputStream->width == newStream->width
2176 && inputStream->height == newStream->height) {
2177 if (zslStream != NULL) {
2178                    /* This scenario indicates that multiple YUV streams with the same
2179                     * size as the input stream have been requested. Since the zsl
2180                     * stream handle is used solely to override the size of streams
2181                     * that share h/w streams, we just make a guess here as to which
2182                     * of the streams is the ZSL stream. This will be refactored once
2183                     * we add generic logic for streams sharing encoder output.
2184                     */
2185 LOGH("Warning, Multiple ip/reprocess streams requested!");
2186 }
2187 zslStream = newStream;
2188 }
2189 }
2190 }
2191
2192 /* If a zsl stream is set, we know that we have configured at least one input or
2193 bidirectional stream */
2194 if (NULL != zslStream) {
2195 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2196 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2197 mInputStreamInfo.format = zslStream->format;
2198 mInputStreamInfo.usage = zslStream->usage;
2199 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2200 mInputStreamInfo.dim.width,
2201 mInputStreamInfo.dim.height,
2202 mInputStreamInfo.format, mInputStreamInfo.usage);
2203 }
2204
2205 cleanAndSortStreamInfo();
2206 if (mMetadataChannel) {
2207 delete mMetadataChannel;
2208 mMetadataChannel = NULL;
2209 }
2210 if (mSupportChannel) {
2211 delete mSupportChannel;
2212 mSupportChannel = NULL;
2213 }
2214
2215 if (mAnalysisChannel) {
2216 delete mAnalysisChannel;
2217 mAnalysisChannel = NULL;
2218 }
2219
2220 if (mDummyBatchChannel) {
2221 delete mDummyBatchChannel;
2222 mDummyBatchChannel = NULL;
2223 }
2224
Emilian Peev7650c122017-01-19 08:24:33 -08002225 if (mDepthChannel) {
2226 mDepthChannel = NULL;
2227 }
2228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002229 mShutterDispatcher.clear();
2230 mOutputBufferDispatcher.clear();
2231
Thierry Strudel2896d122017-02-23 19:18:03 -08002232 char is_type_value[PROPERTY_VALUE_MAX];
2233 property_get("persist.camera.is_type", is_type_value, "4");
2234 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2235
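    // persist.camera.gzoom.at is read as a bitmask: bit 0 enables Google zoom
    // on the video stream, bit 1 enables it on preview. persist.camera.gzoom.4k
    // additionally allows it for 4K video when set to a non-zero value.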
Binhao Line406f062017-05-03 14:39:44 -07002236 char property_value[PROPERTY_VALUE_MAX];
2237 property_get("persist.camera.gzoom.at", property_value, "0");
2238 int goog_zoom_at = atoi(property_value);
2239 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2240 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2241
2242 property_get("persist.camera.gzoom.4k", property_value, "0");
2243 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2244
Thierry Strudel3d639192016-09-09 11:52:26 -07002245 //Create metadata channel and initialize it
2246 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2247 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2248 gCamCapability[mCameraId]->color_arrangement);
2249 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2250 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002251 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002252 if (mMetadataChannel == NULL) {
2253 LOGE("failed to allocate metadata channel");
2254 rc = -ENOMEM;
2255 pthread_mutex_unlock(&mMutex);
2256 return rc;
2257 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002258 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002259 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2260 if (rc < 0) {
2261 LOGE("metadata channel initialization failed");
2262 delete mMetadataChannel;
2263 mMetadataChannel = NULL;
2264 pthread_mutex_unlock(&mMutex);
2265 return rc;
2266 }
2267
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002270 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002271 // Keep track of preview/video streams indices.
2272 // There could be more than one preview streams, but only one video stream.
2273 int32_t video_stream_idx = -1;
2274 int32_t preview_stream_idx[streamList->num_streams];
2275 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2277 /* Allocate channel objects for the requested streams */
2278 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002279
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 camera3_stream_t *newStream = streamList->streams[i];
2281 uint32_t stream_usage = newStream->usage;
2282 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2283 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2284 struct camera_info *p_info = NULL;
2285 pthread_mutex_lock(&gCamLock);
2286 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2287 pthread_mutex_unlock(&gCamLock);
2288 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2289 || IS_USAGE_ZSL(newStream->usage)) &&
2290 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002291 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002293 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2294 if (bUseCommonFeatureMask)
2295 zsl_ppmask = commonFeatureMask;
2296 else
2297 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002299 if (numStreamsOnEncoder > 0)
2300 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2301 else
2302 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002304 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002306 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 LOGH("Input stream configured, reprocess config");
2308 } else {
2309 //for non zsl streams find out the format
2310 switch (newStream->format) {
2311 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2312 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002313 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2315 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2316 /* add additional features to pp feature mask */
2317 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2318 mStreamConfigInfo.num_streams);
2319
2320 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2321 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2322 CAM_STREAM_TYPE_VIDEO;
2323 if (m_bTnrEnabled && m_bTnrVideo) {
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2325 CAM_QCOM_FEATURE_CPP_TNR;
2326 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2328 ~CAM_QCOM_FEATURE_CDS;
2329 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2331 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2332 CAM_QTI_FEATURE_PPEISCORE;
2333 }
Binhao Line406f062017-05-03 14:39:44 -07002334 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2336 CAM_QCOM_FEATURE_GOOG_ZOOM;
2337 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002338 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002339 } else {
2340 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2341 CAM_STREAM_TYPE_PREVIEW;
2342 if (m_bTnrEnabled && m_bTnrPreview) {
2343 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2344 CAM_QCOM_FEATURE_CPP_TNR;
2345 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2346 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2347 ~CAM_QCOM_FEATURE_CDS;
2348 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002349 if(!m_bSwTnrPreview) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2351 ~CAM_QTI_FEATURE_SW_TNR;
2352 }
Binhao Line406f062017-05-03 14:39:44 -07002353 if (is_goog_zoom_preview_enabled) {
2354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2355 CAM_QCOM_FEATURE_GOOG_ZOOM;
2356 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002357 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 padding_info.width_padding = mSurfaceStridePadding;
2359 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002360 previewSize.width = (int32_t)newStream->width;
2361 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 }
2363 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2364 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2365 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2366 newStream->height;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2368 newStream->width;
2369 }
2370 }
2371 break;
2372 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002373 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002374 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2375 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2376 if (bUseCommonFeatureMask)
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2378 commonFeatureMask;
2379 else
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2381 CAM_QCOM_FEATURE_NONE;
2382 } else {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2384 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 }
2386 break;
2387 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2390 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2391 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2392 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2393 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 /* Remove rotation if it is not supported
2395 for 4K LiveVideo snapshot case (online processing) */
2396 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2397 CAM_QCOM_FEATURE_ROTATION)) {
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2399 &= ~CAM_QCOM_FEATURE_ROTATION;
2400 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002401 } else {
2402 if (bUseCommonFeatureMask &&
2403 isOnEncoder(maxViewfinderSize, newStream->width,
2404 newStream->height)) {
2405 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2406 } else {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2408 }
2409 }
2410 if (isZsl) {
2411 if (zslStream) {
2412 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2413 (int32_t)zslStream->width;
2414 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2415 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2417 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002418 } else {
2419 LOGE("Error, No ZSL stream identified");
2420 pthread_mutex_unlock(&mMutex);
2421 return -EINVAL;
2422 }
2423 } else if (m_bIs4KVideo) {
2424 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2425 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2426 } else if (bYuv888OverrideJpeg) {
2427 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2428 (int32_t)largeYuv888Size.width;
2429 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2430 (int32_t)largeYuv888Size.height;
2431 }
2432 break;
2433 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2434 case HAL_PIXEL_FORMAT_RAW16:
2435 case HAL_PIXEL_FORMAT_RAW10:
2436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2438 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002439 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2440 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2441 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2442 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2443 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2444 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2445 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2446 gCamCapability[mCameraId]->dt[mPDIndex];
2447 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2448 gCamCapability[mCameraId]->vc[mPDIndex];
2449 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002450 break;
2451 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002452 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2455 break;
2456 }
2457 }
2458
2459 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2460 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2461 gCamCapability[mCameraId]->color_arrangement);
2462
2463 if (newStream->priv == NULL) {
2464 //New stream, construct channel
2465 switch (newStream->stream_type) {
2466 case CAMERA3_STREAM_INPUT:
2467 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2468 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2469 break;
2470 case CAMERA3_STREAM_BIDIRECTIONAL:
2471 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2472 GRALLOC_USAGE_HW_CAMERA_WRITE;
2473 break;
2474 case CAMERA3_STREAM_OUTPUT:
2475 /* For video encoding stream, set read/write rarely
2476 * flag so that they may be set to un-cached */
2477 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2478 newStream->usage |=
2479 (GRALLOC_USAGE_SW_READ_RARELY |
2480 GRALLOC_USAGE_SW_WRITE_RARELY |
2481 GRALLOC_USAGE_HW_CAMERA_WRITE);
2482 else if (IS_USAGE_ZSL(newStream->usage))
2483 {
2484 LOGD("ZSL usage flag skipping");
2485 }
2486 else if (newStream == zslStream
2487 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2488 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2489 } else
2490 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2491 break;
2492 default:
2493 LOGE("Invalid stream_type %d", newStream->stream_type);
2494 break;
2495 }
2496
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002497 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2499 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2500 QCamera3ProcessingChannel *channel = NULL;
2501 switch (newStream->format) {
2502 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2503 if ((newStream->usage &
2504 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2505 (streamList->operation_mode ==
2506 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2507 ) {
2508 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2509 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002510 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 this,
2512 newStream,
2513 (cam_stream_type_t)
2514 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2516 mMetadataChannel,
2517 0); //heap buffers are not required for HFR video channel
2518 if (channel == NULL) {
2519 LOGE("allocation of channel failed");
2520 pthread_mutex_unlock(&mMutex);
2521 return -ENOMEM;
2522 }
2523 //channel->getNumBuffers() will return 0 here so use
2524                        //MAX_INFLIGHT_HFR_REQUESTS
2525 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2526 newStream->priv = channel;
2527 LOGI("num video buffers in HFR mode: %d",
2528 MAX_INFLIGHT_HFR_REQUESTS);
2529 } else {
2530 /* Copy stream contents in HFR preview only case to create
2531 * dummy batch channel so that sensor streaming is in
2532 * HFR mode */
2533 if (!m_bIsVideo && (streamList->operation_mode ==
2534 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2535 mDummyBatchStream = *newStream;
2536 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 int bufferCount = MAX_INFLIGHT_REQUESTS;
2538 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2539 CAM_STREAM_TYPE_VIDEO) {
2540 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2541 bufferCount = MAX_VIDEO_BUFFERS;
2542 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2544 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002545 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 this,
2547 newStream,
2548 (cam_stream_type_t)
2549 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2550 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2551 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 if (channel == NULL) {
2554 LOGE("allocation of channel failed");
2555 pthread_mutex_unlock(&mMutex);
2556 return -ENOMEM;
2557 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002558 /* disable UBWC for preview, though supported,
2559 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002560 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002561 (previewSize.width == (int32_t)videoWidth)&&
2562 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002563 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002564 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002565 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002566 /* When goog_zoom is linked to the preview or video stream,
2567 * disable ubwc to the linked stream */
2568 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2569 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2570 channel->setUBWCEnabled(false);
2571 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002572 newStream->max_buffers = channel->getNumBuffers();
2573 newStream->priv = channel;
2574 }
2575 break;
2576 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2577 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2578 mChannelHandle,
2579 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002580 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 this,
2582 newStream,
2583 (cam_stream_type_t)
2584 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2585 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2586 mMetadataChannel);
2587 if (channel == NULL) {
2588 LOGE("allocation of YUV channel failed");
2589 pthread_mutex_unlock(&mMutex);
2590 return -ENOMEM;
2591 }
2592 newStream->max_buffers = channel->getNumBuffers();
2593 newStream->priv = channel;
2594 break;
2595 }
2596 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2597 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002598 case HAL_PIXEL_FORMAT_RAW10: {
2599 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2600 (HAL_DATASPACE_DEPTH != newStream->data_space))
2601 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002602 mRawChannel = new QCamera3RawChannel(
2603 mCameraHandle->camera_handle, mChannelHandle,
2604 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002605 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 this, newStream,
2607 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002608 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002609 if (mRawChannel == NULL) {
2610 LOGE("allocation of raw channel failed");
2611 pthread_mutex_unlock(&mMutex);
2612 return -ENOMEM;
2613 }
2614 newStream->max_buffers = mRawChannel->getNumBuffers();
2615 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2616 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002618 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002619 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2620 mDepthChannel = new QCamera3DepthChannel(
2621 mCameraHandle->camera_handle, mChannelHandle,
2622 mCameraHandle->ops, NULL, NULL, &padding_info,
2623 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2624 mMetadataChannel);
2625 if (NULL == mDepthChannel) {
2626 LOGE("Allocation of depth channel failed");
2627 pthread_mutex_unlock(&mMutex);
2628 return NO_MEMORY;
2629 }
2630 newStream->priv = mDepthChannel;
2631 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2632 } else {
2633 // Max live snapshot inflight buffer is 1. This is to mitigate
2634 // frame drop issues for video snapshot. The more buffers being
2635 // allocated, the more frame drops there are.
2636 mPictureChannel = new QCamera3PicChannel(
2637 mCameraHandle->camera_handle, mChannelHandle,
2638 mCameraHandle->ops, captureResultCb,
2639 setBufferErrorStatus, &padding_info, this, newStream,
2640 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2641 m_bIs4KVideo, isZsl, mMetadataChannel,
2642 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2643 if (mPictureChannel == NULL) {
2644 LOGE("allocation of channel failed");
2645 pthread_mutex_unlock(&mMutex);
2646 return -ENOMEM;
2647 }
2648 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2649 newStream->max_buffers = mPictureChannel->getNumBuffers();
2650 mPictureChannel->overrideYuvSize(
2651 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2652 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002653 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 break;
2655
2656 default:
2657 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002658 pthread_mutex_unlock(&mMutex);
2659 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 }
2661 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2662 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2663 } else {
2664 LOGE("Error, Unknown stream type");
2665 pthread_mutex_unlock(&mMutex);
2666 return -EINVAL;
2667 }
2668
2669 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002670 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2671 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002673 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2675 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2676 }
2677 }
2678
2679 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2680 it != mStreamInfo.end(); it++) {
2681 if ((*it)->stream == newStream) {
2682 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2683 break;
2684 }
2685 }
2686 } else {
2687 // Channel already exists for this stream
2688 // Do nothing for now
2689 }
2690 padding_info = gCamCapability[mCameraId]->padding_info;
2691
Emilian Peev7650c122017-01-19 08:24:33 -08002692        /* Do not add entries for the input and depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002693         * since there is no real stream associated with them
                2694         */
Emilian Peev7650c122017-01-19 08:24:33 -08002695 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002696 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2697 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002698 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002700 }
2701
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002702 // Let buffer dispatcher know the configured streams.
2703 mOutputBufferDispatcher.configureStreams(streamList);
2704
Binhao Lincdb362a2017-04-20 13:31:54 -07002705 // By default, preview stream TNR is disabled.
2706 // Enable TNR to the preview stream if all conditions below are satisfied:
2707 // 1. resolution <= 1080p.
2708 // 2. preview resolution == video resolution.
2709 // 3. video stream TNR is enabled.
2710 // 4. EIS2.0
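    // Illustrative example (hypothetical stream set): a 1920x1080 preview paired with a
    // 1920x1080 video stream, with m_bTnrEnabled/m_bTnrVideo set and EIS 2.0 selected,
    // gets CAM_QCOM_FEATURE_CPP_TNR added to its postprocess mask by the loop below,
    // and CAM_QCOM_FEATURE_CDS cleared since TNR and CDS are mutually exclusive.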
2711 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2712 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2713 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2714 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2715 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2716 video_stream->width == preview_stream->width &&
2717 video_stream->height == preview_stream->height) {
2718 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2719 CAM_QCOM_FEATURE_CPP_TNR;
2720 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2721 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2722 ~CAM_QCOM_FEATURE_CDS;
2723 }
2724 }
2725
Thierry Strudel2896d122017-02-23 19:18:03 -08002726 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2727 onlyRaw = false;
2728 }
2729
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002730 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002731 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002732 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002733 cam_analysis_info_t analysisInfo;
2734 int32_t ret = NO_ERROR;
2735 ret = mCommon.getAnalysisInfo(
2736 FALSE,
2737 analysisFeatureMask,
2738 &analysisInfo);
2739 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002740 cam_color_filter_arrangement_t analysis_color_arrangement =
2741 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2742 CAM_FILTER_ARRANGEMENT_Y :
2743 gCamCapability[mCameraId]->color_arrangement);
2744 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2745 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002746 cam_dimension_t analysisDim;
2747 analysisDim = mCommon.getMatchingDimension(previewSize,
2748 analysisInfo.analysis_recommended_res);
2749
2750 mAnalysisChannel = new QCamera3SupportChannel(
2751 mCameraHandle->camera_handle,
2752 mChannelHandle,
2753 mCameraHandle->ops,
2754 &analysisInfo.analysis_padding_info,
2755 analysisFeatureMask,
2756 CAM_STREAM_TYPE_ANALYSIS,
2757 &analysisDim,
2758 (analysisInfo.analysis_format
2759 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2760 : CAM_FORMAT_YUV_420_NV21),
2761 analysisInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
2763 this,
2764 0); // force buffer count to 0
2765 } else {
2766 LOGW("getAnalysisInfo failed, ret = %d", ret);
2767 }
2768 if (!mAnalysisChannel) {
2769 LOGW("Analysis channel cannot be created");
2770 }
2771 }
2772
Thierry Strudel3d639192016-09-09 11:52:26 -07002773 //RAW DUMP channel
                2774    if (mEnableRawDump && !isRawStreamRequested) {
2775 cam_dimension_t rawDumpSize;
2776 rawDumpSize = getMaxRawSize(mCameraId);
2777 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2778 setPAAFSupport(rawDumpFeatureMask,
2779 CAM_STREAM_TYPE_RAW,
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2782 mChannelHandle,
2783 mCameraHandle->ops,
2784 rawDumpSize,
2785 &padding_info,
2786 this, rawDumpFeatureMask);
2787 if (!mRawDumpChannel) {
2788 LOGE("Raw Dump channel cannot be created");
2789 pthread_mutex_unlock(&mMutex);
2790 return -ENOMEM;
2791 }
2792 }
2793
Thierry Strudel3d639192016-09-09 11:52:26 -07002794 if (mAnalysisChannel) {
2795 cam_analysis_info_t analysisInfo;
2796 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2797 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2798 CAM_STREAM_TYPE_ANALYSIS;
2799 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2800 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002801 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2803 &analysisInfo);
2804 if (rc != NO_ERROR) {
2805 LOGE("getAnalysisInfo failed, ret = %d", rc);
2806 pthread_mutex_unlock(&mMutex);
2807 return rc;
2808 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002809 cam_color_filter_arrangement_t analysis_color_arrangement =
2810 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2811 CAM_FILTER_ARRANGEMENT_Y :
2812 gCamCapability[mCameraId]->color_arrangement);
2813 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2814 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2815 analysis_color_arrangement);
2816
Thierry Strudel3d639192016-09-09 11:52:26 -07002817 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 mCommon.getMatchingDimension(previewSize,
2819 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002820 mStreamConfigInfo.num_streams++;
2821 }
2822
Thierry Strudel2896d122017-02-23 19:18:03 -08002823 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 cam_analysis_info_t supportInfo;
2825 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2826 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2827 setPAAFSupport(callbackFeatureMask,
2828 CAM_STREAM_TYPE_CALLBACK,
2829 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002830 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002831 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002832 if (ret != NO_ERROR) {
2833 /* Ignore the error for Mono camera
2834 * because the PAAF bit mask is only set
2835 * for CAM_STREAM_TYPE_ANALYSIS stream type
2836 */
2837 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2838 LOGW("getAnalysisInfo failed, ret = %d", ret);
2839 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002840 }
2841 mSupportChannel = new QCamera3SupportChannel(
2842 mCameraHandle->camera_handle,
2843 mChannelHandle,
2844 mCameraHandle->ops,
2845 &gCamCapability[mCameraId]->padding_info,
2846 callbackFeatureMask,
2847 CAM_STREAM_TYPE_CALLBACK,
2848 &QCamera3SupportChannel::kDim,
2849 CAM_FORMAT_YUV_420_NV21,
2850 supportInfo.hw_analysis_supported,
2851 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002852 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002853 if (!mSupportChannel) {
2854 LOGE("dummy channel cannot be created");
2855 pthread_mutex_unlock(&mMutex);
2856 return -ENOMEM;
2857 }
2858 }
2859
2860 if (mSupportChannel) {
2861 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2862 QCamera3SupportChannel::kDim;
2863 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2864 CAM_STREAM_TYPE_CALLBACK;
2865 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2866 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2867 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2868 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2869 gCamCapability[mCameraId]->color_arrangement);
2870 mStreamConfigInfo.num_streams++;
2871 }
2872
2873 if (mRawDumpChannel) {
2874 cam_dimension_t rawSize;
2875 rawSize = getMaxRawSize(mCameraId);
2876 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2877 rawSize;
2878 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2879 CAM_STREAM_TYPE_RAW;
2880 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2881 CAM_QCOM_FEATURE_NONE;
2882 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2883 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2884 gCamCapability[mCameraId]->color_arrangement);
2885 mStreamConfigInfo.num_streams++;
2886 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002887
2888 if (mHdrPlusRawSrcChannel) {
2889 cam_dimension_t rawSize;
2890 rawSize = getMaxRawSize(mCameraId);
2891 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2892 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2893 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2894 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2896 gCamCapability[mCameraId]->color_arrangement);
2897 mStreamConfigInfo.num_streams++;
2898 }
2899
Thierry Strudel3d639192016-09-09 11:52:26 -07002900    /* In HFR mode, if a video stream is not added, create a dummy channel so that
                2901     * the ISP can operate in batch mode even for a preview-only case. This channel is
                2902     * never 'start'ed (no stream-on); it is only 'initialized' */
2903 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2904 !m_bIsVideo) {
2905 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2906 setPAAFSupport(dummyFeatureMask,
2907 CAM_STREAM_TYPE_VIDEO,
2908 gCamCapability[mCameraId]->color_arrangement);
2909 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2910 mChannelHandle,
2911 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002912 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002913 this,
2914 &mDummyBatchStream,
2915 CAM_STREAM_TYPE_VIDEO,
2916 dummyFeatureMask,
2917 mMetadataChannel);
2918 if (NULL == mDummyBatchChannel) {
                2919            LOGE("creation of mDummyBatchChannel failed. "
                2920                    "Preview will use non-HFR sensor mode");
2921 }
2922 }
2923 if (mDummyBatchChannel) {
2924 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2925 mDummyBatchStream.width;
2926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2927 mDummyBatchStream.height;
2928 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2929 CAM_STREAM_TYPE_VIDEO;
2930 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2931 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2932 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2934 gCamCapability[mCameraId]->color_arrangement);
2935 mStreamConfigInfo.num_streams++;
2936 }
2937
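    // A sketch of the intent behind the expression below: 0 for 4K video (presumably
    // letting the backend choose the count), MAX_VIDEO_BUFFERS when the EIS 3.0
    // property is enabled, and MAX_INFLIGHT_REQUESTS for all remaining cases.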
2938 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2939 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002940 m_bIs4KVideo ? 0 :
2941 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002942
2943 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2944 for (pendingRequestIterator i = mPendingRequestsList.begin();
2945 i != mPendingRequestsList.end();) {
2946 i = erasePendingRequest(i);
2947 }
2948 mPendingFrameDropList.clear();
2949 // Initialize/Reset the pending buffers list
2950 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2951 req.mPendingBufferList.clear();
2952 }
2953 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2954
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 mCurJpegMeta.clear();
2956 //Get min frame duration for this streams configuration
2957 deriveMinFrameDuration();
2958
Chien-Yu Chenee335912017-02-09 17:53:20 -08002959 mFirstPreviewIntentSeen = false;
2960
                2961    // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002962 {
2963 Mutex::Autolock l(gHdrPlusClientLock);
2964 disableHdrPlusModeLocked();
2965 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002966
Thierry Strudel3d639192016-09-09 11:52:26 -07002967 // Update state
2968 mState = CONFIGURED;
2969
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002970 mFirstMetadataCallback = true;
2971
Thierry Strudel3d639192016-09-09 11:52:26 -07002972 pthread_mutex_unlock(&mMutex);
2973
2974 return rc;
2975}
2976
2977/*===========================================================================
2978 * FUNCTION : validateCaptureRequest
2979 *
2980 * DESCRIPTION: validate a capture request from camera service
2981 *
2982 * PARAMETERS :
2983 * @request : request from framework to process
2984 *
2985 * RETURN :
2986 *
2987 *==========================================================================*/
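// Summary of the checks below, in order: non-NULL request; non-NULL settings for the
// first request after a stream configuration; at least one output buffer unless
// internally requested streams exist; buffer count below MAX_NUM_STREAMS; optional
// input buffer sanity; and per output buffer: a configured stream, OK status, no
// release fence, and non-NULL buffer handles.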
2988int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002989 camera3_capture_request_t *request,
2990 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002991{
2992 ssize_t idx = 0;
2993 const camera3_stream_buffer_t *b;
2994 CameraMetadata meta;
2995
2996 /* Sanity check the request */
2997 if (request == NULL) {
2998 LOGE("NULL capture request");
2999 return BAD_VALUE;
3000 }
3001
3002 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3003 /*settings cannot be null for the first request*/
3004 return BAD_VALUE;
3005 }
3006
3007 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003008 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3009 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003010        LOGE("Request %d: No output buffers provided!",
                3011                frameNumber);
3012 return BAD_VALUE;
3013 }
3014 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
                3015        LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
                3016                 request->num_output_buffers, MAX_NUM_STREAMS);
3017 return BAD_VALUE;
3018 }
3019 if (request->input_buffer != NULL) {
3020 b = request->input_buffer;
3021 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3022 LOGE("Request %d: Buffer %ld: Status not OK!",
3023 frameNumber, (long)idx);
3024 return BAD_VALUE;
3025 }
3026 if (b->release_fence != -1) {
3027 LOGE("Request %d: Buffer %ld: Has a release fence!",
3028 frameNumber, (long)idx);
3029 return BAD_VALUE;
3030 }
3031 if (b->buffer == NULL) {
3032 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3033 frameNumber, (long)idx);
3034 return BAD_VALUE;
3035 }
3036 }
3037
3038 // Validate all buffers
3039 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003040 if (b == NULL) {
3041 return BAD_VALUE;
3042 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003043 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003044 QCamera3ProcessingChannel *channel =
3045 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3046 if (channel == NULL) {
3047 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3048 frameNumber, (long)idx);
3049 return BAD_VALUE;
3050 }
3051 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3052 LOGE("Request %d: Buffer %ld: Status not OK!",
3053 frameNumber, (long)idx);
3054 return BAD_VALUE;
3055 }
3056 if (b->release_fence != -1) {
3057 LOGE("Request %d: Buffer %ld: Has a release fence!",
3058 frameNumber, (long)idx);
3059 return BAD_VALUE;
3060 }
3061 if (b->buffer == NULL) {
3062 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3063 frameNumber, (long)idx);
3064 return BAD_VALUE;
3065 }
3066 if (*(b->buffer) == NULL) {
3067 LOGE("Request %d: Buffer %ld: NULL private handle!",
3068 frameNumber, (long)idx);
3069 return BAD_VALUE;
3070 }
3071 idx++;
3072 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003074 return NO_ERROR;
3075}
3076
3077/*===========================================================================
3078 * FUNCTION : deriveMinFrameDuration
3079 *
                3080 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3081 * on currently configured streams.
3082 *
3083 * PARAMETERS : NONE
3084 *
3085 * RETURN : NONE
3086 *
3087 *==========================================================================*/
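// Worked example (hypothetical sizes): with a 4000x3000 BLOB (JPEG) stream, a
// 1920x1080 preview and no RAW stream configured, maxProcessedDim becomes 12MP (the
// JPEG dimension is folded into processed), maxRawDim is then the smallest supported
// RAW size with at least that many pixels, and the matching raw_min_duration[] and
// picture_min_duration[] entries become the minimum frame durations cached below.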
3088void QCamera3HardwareInterface::deriveMinFrameDuration()
3089{
3090 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3091
3092 maxJpegDim = 0;
3093 maxProcessedDim = 0;
3094 maxRawDim = 0;
3095
3096 // Figure out maximum jpeg, processed, and raw dimensions
3097 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3098 it != mStreamInfo.end(); it++) {
3099
3100 // Input stream doesn't have valid stream_type
3101 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3102 continue;
3103
3104 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3105 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3106 if (dimension > maxJpegDim)
3107 maxJpegDim = dimension;
3108 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3109 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3110 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3111 if (dimension > maxRawDim)
3112 maxRawDim = dimension;
3113 } else {
3114 if (dimension > maxProcessedDim)
3115 maxProcessedDim = dimension;
3116 }
3117 }
3118
3119 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3120 MAX_SIZES_CNT);
3121
3122 //Assume all jpeg dimensions are in processed dimensions.
3123 if (maxJpegDim > maxProcessedDim)
3124 maxProcessedDim = maxJpegDim;
                3125    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
3126 if (maxProcessedDim > maxRawDim) {
3127 maxRawDim = INT32_MAX;
3128
3129 for (size_t i = 0; i < count; i++) {
3130 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3131 gCamCapability[mCameraId]->raw_dim[i].height;
3132 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3133 maxRawDim = dimension;
3134 }
3135 }
3136
3137 //Find minimum durations for processed, jpeg, and raw
3138 for (size_t i = 0; i < count; i++) {
3139 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3140 gCamCapability[mCameraId]->raw_dim[i].height) {
3141 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3142 break;
3143 }
3144 }
3145 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3146 for (size_t i = 0; i < count; i++) {
3147 if (maxProcessedDim ==
3148 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3149 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3150 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3151 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3152 break;
3153 }
3154 }
3155}
3156
3157/*===========================================================================
3158 * FUNCTION : getMinFrameDuration
3159 *
                3160 * DESCRIPTION: get the minimum frame duration based on the minimum frame durations
                3161 *              derived for the configured streams and the current request configuration.
                3162 *
                3163 * PARAMETERS : @request: request sent by the frameworks
                3164 *
                3165 * RETURN     : min frame duration for a particular request
3166 *
3167 *==========================================================================*/
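// Example: a request containing only preview/video buffers returns
// MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG) buffer is
// part of the request, mMinJpegFrameDuration is folded into the MAX as well.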
3168int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3169{
3170 bool hasJpegStream = false;
3171 bool hasRawStream = false;
3172 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3173 const camera3_stream_t *stream = request->output_buffers[i].stream;
3174 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3175 hasJpegStream = true;
3176 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3177 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3178 stream->format == HAL_PIXEL_FORMAT_RAW16)
3179 hasRawStream = true;
3180 }
3181
3182 if (!hasJpegStream)
3183 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3184 else
3185 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3186}
3187
3188/*===========================================================================
3189 * FUNCTION : handleBuffersDuringFlushLock
3190 *
3191 * DESCRIPTION: Account for buffers returned from back-end during flush
3192 * This function is executed while mMutex is held by the caller.
3193 *
3194 * PARAMETERS :
3195 * @buffer: image buffer for the callback
3196 *
3197 * RETURN :
3198 *==========================================================================*/
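// In short: each buffer returned by the back-end during flush decrements
// numPendingBufsAtFlush; once the count reaches zero, mBuffersCond is signalled so the
// flush() call blocked on it can continue.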
3199void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3200{
3201 bool buffer_found = false;
3202 for (List<PendingBuffersInRequest>::iterator req =
3203 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3204 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3205 for (List<PendingBufferInfo>::iterator i =
3206 req->mPendingBufferList.begin();
3207 i != req->mPendingBufferList.end(); i++) {
3208 if (i->buffer == buffer->buffer) {
3209 mPendingBuffersMap.numPendingBufsAtFlush--;
3210 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3211 buffer->buffer, req->frame_number,
3212 mPendingBuffersMap.numPendingBufsAtFlush);
3213 buffer_found = true;
3214 break;
3215 }
3216 }
3217 if (buffer_found) {
3218 break;
3219 }
3220 }
3221 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3222 //signal the flush()
3223 LOGD("All buffers returned to HAL. Continue flush");
3224 pthread_cond_signal(&mBuffersCond);
3225 }
3226}
3227
Thierry Strudel3d639192016-09-09 11:52:26 -07003228/*===========================================================================
3229 * FUNCTION : handleBatchMetadata
3230 *
3231 * DESCRIPTION: Handles metadata buffer callback in batch mode
3232 *
3233 * PARAMETERS : @metadata_buf: metadata buffer
3234 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3235 * the meta buf in this method
3236 *
3237 * RETURN :
3238 *
3239 *==========================================================================*/
3240void QCamera3HardwareInterface::handleBatchMetadata(
3241 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3242{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003243 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003244
3245 if (NULL == metadata_buf) {
3246 LOGE("metadata_buf is NULL");
3247 return;
3248 }
                3249    /* In batch mode, the metadata will contain the frame number and timestamp of
                3250     * the last frame in the batch. Eg: a batch containing buffers from requests
                3251     * 5,6,7 and 8 will have the frame number and timestamp corresponding to 8.
                3252     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
                3253     * multiple process_capture_results */
3254 metadata_buffer_t *metadata =
3255 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3256 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3257 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3258 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3259 uint32_t frame_number = 0, urgent_frame_number = 0;
3260 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3261 bool invalid_metadata = false;
3262 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3263 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003264 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003265
3266 int32_t *p_frame_number_valid =
3267 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3268 uint32_t *p_frame_number =
3269 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3270 int64_t *p_capture_time =
3271 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3272 int32_t *p_urgent_frame_number_valid =
3273 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3274 uint32_t *p_urgent_frame_number =
3275 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3276
3277 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3278 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3279 (NULL == p_urgent_frame_number)) {
3280 LOGE("Invalid metadata");
3281 invalid_metadata = true;
3282 } else {
3283 frame_number_valid = *p_frame_number_valid;
3284 last_frame_number = *p_frame_number;
3285 last_frame_capture_time = *p_capture_time;
3286 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3287 last_urgent_frame_number = *p_urgent_frame_number;
3288 }
3289
                3290    /* In batch mode, when no video buffers are requested, set_parms are sent
3291 * for every capture_request. The difference between consecutive urgent
3292 * frame numbers and frame numbers should be used to interpolate the
3293 * corresponding frame numbers and time stamps */
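    // Illustrative walk-through (hypothetical numbers): if last_frame_number is 8 and
    // mPendingBatchMap maps it back to a first_frame_number of 5, frameNumDiff becomes 4
    // and the loop further below replays the metadata four times with inferred frame
    // numbers 5 through 8.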
3294 pthread_mutex_lock(&mMutex);
3295 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003296 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3297 if(idx < 0) {
3298 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3299 last_urgent_frame_number);
3300 mState = ERROR;
3301 pthread_mutex_unlock(&mMutex);
3302 return;
3303 }
3304 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003305 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3306 first_urgent_frame_number;
3307
3308 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3309 urgent_frame_number_valid,
3310 first_urgent_frame_number, last_urgent_frame_number);
3311 }
3312
3313 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003314 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3315 if(idx < 0) {
3316 LOGE("Invalid frame number received: %d. Irrecoverable error",
3317 last_frame_number);
3318 mState = ERROR;
3319 pthread_mutex_unlock(&mMutex);
3320 return;
3321 }
3322 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003323 frameNumDiff = last_frame_number + 1 -
3324 first_frame_number;
3325 mPendingBatchMap.removeItem(last_frame_number);
3326
3327 LOGD("frm: valid: %d frm_num: %d - %d",
3328 frame_number_valid,
3329 first_frame_number, last_frame_number);
3330
3331 }
3332 pthread_mutex_unlock(&mMutex);
3333
3334 if (urgent_frame_number_valid || frame_number_valid) {
3335 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3336 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3337 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3338 urgentFrameNumDiff, last_urgent_frame_number);
3339 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3340 LOGE("frameNumDiff: %d frameNum: %d",
3341 frameNumDiff, last_frame_number);
3342 }
3343
3344 for (size_t i = 0; i < loopCount; i++) {
3345 /* handleMetadataWithLock is called even for invalid_metadata for
3346 * pipeline depth calculation */
3347 if (!invalid_metadata) {
3348 /* Infer frame number. Batch metadata contains frame number of the
3349 * last frame */
3350 if (urgent_frame_number_valid) {
3351 if (i < urgentFrameNumDiff) {
3352 urgent_frame_number =
3353 first_urgent_frame_number + i;
3354 LOGD("inferred urgent frame_number: %d",
3355 urgent_frame_number);
3356 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3357 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3358 } else {
3359 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3360 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3361 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3362 }
3363 }
3364
3365 /* Infer frame number. Batch metadata contains frame number of the
3366 * last frame */
3367 if (frame_number_valid) {
3368 if (i < frameNumDiff) {
3369 frame_number = first_frame_number + i;
3370 LOGD("inferred frame_number: %d", frame_number);
3371 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3372 CAM_INTF_META_FRAME_NUMBER, frame_number);
3373 } else {
3374 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3375 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3376 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3377 }
3378 }
3379
3380 if (last_frame_capture_time) {
3381 //Infer timestamp
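                // Worked example of the inference below (hypothetical values): with
                // loopCount = 4 and mHFRVideoFps = 120, first_frame_capture_time is
                // last_frame_capture_time - 3 * (NSEC_PER_SEC / 120) = last - 25ms, and
                // iteration i adds back i * (NSEC_PER_SEC / 120), i.e. ~8.33ms per frame.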
3382 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003383 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003384 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003385 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003386 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3387 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3388 LOGD("batch capture_time: %lld, capture_time: %lld",
3389 last_frame_capture_time, capture_time);
3390 }
3391 }
3392 pthread_mutex_lock(&mMutex);
3393 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003394 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003395 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3396 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003397 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 pthread_mutex_unlock(&mMutex);
3399 }
3400
3401 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003402 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003403 mMetadataChannel->bufDone(metadata_buf);
3404 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003405 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003406 }
3407}
3408
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003409void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3410 camera3_error_msg_code_t errorCode)
3411{
3412 camera3_notify_msg_t notify_msg;
3413 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3414 notify_msg.type = CAMERA3_MSG_ERROR;
3415 notify_msg.message.error.error_code = errorCode;
3416 notify_msg.message.error.error_stream = NULL;
3417 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003418 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003419
3420 return;
3421}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003422
3423/*===========================================================================
3424 * FUNCTION : sendPartialMetadataWithLock
3425 *
3426 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3427 *
3428 * PARAMETERS : @metadata: metadata buffer
3429 * @requestIter: The iterator for the pending capture request for
                3430 *                which the partial result is being sent
3431 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3432 * last urgent metadata in a batch. Always true for non-batch mode
3433 *
3434 * RETURN :
3435 *
3436 *==========================================================================*/
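// In short: bump the request's partial_result_cnt, translate the urgent (3A) metadata,
// notify an active HDR+ client if one is attached, and deliver the partial result
// through orchestrateResult().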
3437
3438void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3439 metadata_buffer_t *metadata,
3440 const pendingRequestIterator requestIter,
3441 bool lastUrgentMetadataInBatch)
3442{
3443 camera3_capture_result_t result;
3444 memset(&result, 0, sizeof(camera3_capture_result_t));
3445
3446 requestIter->partial_result_cnt++;
3447
3448 // Extract 3A metadata
3449 result.result = translateCbUrgentMetadataToResultMetadata(
3450 metadata, lastUrgentMetadataInBatch);
3451 // Populate metadata result
3452 result.frame_number = requestIter->frame_number;
3453 result.num_output_buffers = 0;
3454 result.output_buffers = NULL;
3455 result.partial_result = requestIter->partial_result_cnt;
3456
3457 {
3458 Mutex::Autolock l(gHdrPlusClientLock);
3459 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3460 // Notify HDR+ client about the partial metadata.
3461 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3462 result.partial_result == PARTIAL_RESULT_COUNT);
3463 }
3464 }
3465
3466 orchestrateResult(&result);
3467 LOGD("urgent frame_number = %u", result.frame_number);
3468 free_camera_metadata((camera_metadata_t *)result.result);
3469}
3470
Thierry Strudel3d639192016-09-09 11:52:26 -07003471/*===========================================================================
3472 * FUNCTION : handleMetadataWithLock
3473 *
3474 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3475 *
3476 * PARAMETERS : @metadata_buf: metadata buffer
3477 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3478 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003479 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3480 * last urgent metadata in a batch. Always true for non-batch mode
3481 * @lastMetadataInBatch: Boolean to indicate whether this is the
3482 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003483 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3484 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 *
3486 * RETURN :
3487 *
3488 *==========================================================================*/
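// Rough flow of this handler, as implemented below: bail out early during flush or in
// error states, time out overdue buffers, emit jump-start partial results on the very
// first metadata callback, send urgent (3A) partial results, report dropped buffers
// (including instant-AEC drops), translate the HAL metadata for the matching pending
// request, then mark the shutter ready and hand the result metadata to the dispatcher.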
3489void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003490 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003491 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3492 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003493{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003494 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3496 //during flush do not send metadata from this thread
3497 LOGD("not sending metadata during flush or when mState is error");
3498 if (free_and_bufdone_meta_buf) {
3499 mMetadataChannel->bufDone(metadata_buf);
3500 free(metadata_buf);
3501 }
3502 return;
3503 }
3504
3505 //not in flush
3506 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3507 int32_t frame_number_valid, urgent_frame_number_valid;
3508 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003509 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003510 nsecs_t currentSysTime;
3511
3512 int32_t *p_frame_number_valid =
3513 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3514 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3515 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003516 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 int32_t *p_urgent_frame_number_valid =
3518 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3519 uint32_t *p_urgent_frame_number =
3520 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3521 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3522 metadata) {
3523 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3524 *p_frame_number_valid, *p_frame_number);
3525 }
3526
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003527 camera_metadata_t *resultMetadata = nullptr;
3528
Thierry Strudel3d639192016-09-09 11:52:26 -07003529 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3530 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3531 LOGE("Invalid metadata");
3532 if (free_and_bufdone_meta_buf) {
3533 mMetadataChannel->bufDone(metadata_buf);
3534 free(metadata_buf);
3535 }
3536 goto done_metadata;
3537 }
3538 frame_number_valid = *p_frame_number_valid;
3539 frame_number = *p_frame_number;
3540 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003541 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003542 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3543 urgent_frame_number = *p_urgent_frame_number;
3544 currentSysTime = systemTime(CLOCK_MONOTONIC);
3545
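    // A best-effort reading of the block below: it estimates the offset between
    // CLOCK_BOOTTIME and CLOCK_MONOTONIC by sampling MONOTONIC, BOOTTIME, MONOTONIC
    // again, keeping the sample with the smallest bracketing gap, and subtracts that
    // offset from capture_time, presumably to move an uncalibrated sensor timestamp
    // into the MONOTONIC domain.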
Jason Lee603176d2017-05-31 11:43:27 -07003546 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3547 const int tries = 3;
3548 nsecs_t bestGap, measured;
3549 for (int i = 0; i < tries; ++i) {
3550 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3551 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3552 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3553 const nsecs_t gap = tmono2 - tmono;
3554 if (i == 0 || gap < bestGap) {
3555 bestGap = gap;
3556 measured = tbase - ((tmono + tmono2) >> 1);
3557 }
3558 }
3559 capture_time -= measured;
3560 }
3561
Thierry Strudel3d639192016-09-09 11:52:26 -07003562 // Detect if buffers from any requests are overdue
3563 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003564 int64_t timeout;
3565 {
3566 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3567 // If there is a pending HDR+ request, the following requests may be blocked until the
3568 // HDR+ request is done. So allow a longer timeout.
3569 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3570 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3571 }
3572
3573 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003575 assert(missed.stream->priv);
3576 if (missed.stream->priv) {
3577 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3578 assert(ch->mStreams[0]);
3579 if (ch->mStreams[0]) {
3580 LOGE("Cancel missing frame = %d, buffer = %p,"
3581 "stream type = %d, stream format = %d",
3582 req.frame_number, missed.buffer,
3583 ch->mStreams[0]->getMyType(), missed.stream->format);
3584 ch->timeoutFrame(req.frame_number);
3585 }
3586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 }
3588 }
3589 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003590    //For the very first metadata callback, regardless of whether it contains a valid
                3591    //frame number, send the partial metadata for the jumpstarting requests.
                3592    //Note that this has to be done even if the metadata doesn't contain a valid
                3593    //urgent frame number, because in the case where only 1 request is ever submitted
                3594    //to the HAL, there won't be a subsequent valid urgent frame number.
3595 if (mFirstMetadataCallback) {
3596 for (pendingRequestIterator i =
3597 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3598 if (i->bUseFirstPartial) {
3599 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3600 }
3601 }
3602 mFirstMetadataCallback = false;
3603 }
3604
Thierry Strudel3d639192016-09-09 11:52:26 -07003605 //Partial result on process_capture_result for timestamp
3606 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003607 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003608
                3609        //Received an urgent Frame Number, handle it
3610 //using partial results
3611 for (pendingRequestIterator i =
3612 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3613 LOGD("Iterator Frame = %d urgent frame = %d",
3614 i->frame_number, urgent_frame_number);
3615
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003616 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003617 (i->partial_result_cnt == 0)) {
3618 LOGE("Error: HAL missed urgent metadata for frame number %d",
3619 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003620 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003621 }
3622
3623 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003624 i->partial_result_cnt == 0) {
3625 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003626 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3627 // Instant AEC settled for this frame.
3628 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3629 mInstantAECSettledFrameNumber = urgent_frame_number;
3630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003631 break;
3632 }
3633 }
3634 }
3635
3636 if (!frame_number_valid) {
3637 LOGD("Not a valid normal frame number, used as SOF only");
3638 if (free_and_bufdone_meta_buf) {
3639 mMetadataChannel->bufDone(metadata_buf);
3640 free(metadata_buf);
3641 }
3642 goto done_metadata;
3643 }
3644 LOGH("valid frame_number = %u, capture_time = %lld",
3645 frame_number, capture_time);
3646
Emilian Peev7650c122017-01-19 08:24:33 -08003647 if (metadata->is_depth_data_valid) {
3648 handleDepthDataLocked(metadata->depth_data, frame_number);
3649 }
3650
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003651    // Check whether any stream buffer corresponding to this frame is dropped or not.
                3652    // If dropped, then send the ERROR_BUFFER for the corresponding stream.
                3653    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3654 for (auto & pendingRequest : mPendingRequestsList) {
3655 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3656 mInstantAECSettledFrameNumber)) {
3657 camera3_notify_msg_t notify_msg = {};
3658 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003659 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003660 QCamera3ProcessingChannel *channel =
3661 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003662 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003663 if (p_cam_frame_drop) {
3664 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003665 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003666 // Got the stream ID for drop frame.
3667 dropFrame = true;
3668 break;
3669 }
3670 }
3671 } else {
3672 // This is instant AEC case.
                3673                    // For instant AEC, drop the stream until AEC is settled.
3674 dropFrame = true;
3675 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003677 if (dropFrame) {
3678 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3679 if (p_cam_frame_drop) {
3680 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003681 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003682 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003683 } else {
3684 // For instant AEC, inform frame drop and frame number
3685 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3686 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003687 pendingRequest.frame_number, streamID,
3688 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003689 }
3690 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003691 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003692 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003693 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003694 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003695 if (p_cam_frame_drop) {
3696 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003697 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003698 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003699 } else {
3700 // For instant AEC, inform frame drop and frame number
3701 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3702 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003703 pendingRequest.frame_number, streamID,
3704 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003705 }
3706 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003707 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003708 PendingFrameDrop.stream_ID = streamID;
3709 // Add the Frame drop info to mPendingFrameDropList
3710 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003712 }
3713 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003714 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003715
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003716 for (auto & pendingRequest : mPendingRequestsList) {
3717 // Find the pending request with the frame number.
3718 if (pendingRequest.frame_number == frame_number) {
3719 // Update the sensor timestamp.
3720 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003721
Thierry Strudel3d639192016-09-09 11:52:26 -07003722
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003723            /* Set the timestamp in the display metadata so that clients aware of
                3724               private_handle, such as VT, can use this unmodified timestamp.
                3725               The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003726 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003727
Thierry Strudel3d639192016-09-09 11:52:26 -07003728 // Find channel requiring metadata, meaning internal offline postprocess
3729 // is needed.
                3730            //TODO: for now, we don't support two streams requiring metadata at the same time
                3731            // (because we are not making copies, and the metadata buffer is not reference counted).
3732 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003733 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3734 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003735 if (iter->need_metadata) {
3736 internalPproc = true;
3737 QCamera3ProcessingChannel *channel =
3738 (QCamera3ProcessingChannel *)iter->stream->priv;
3739 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003740 if(p_is_metabuf_queued != NULL) {
3741 *p_is_metabuf_queued = true;
3742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003743 break;
3744 }
3745 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003746 for (auto itr = pendingRequest.internalRequestList.begin();
3747 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003748 if (itr->need_metadata) {
3749 internalPproc = true;
3750 QCamera3ProcessingChannel *channel =
3751 (QCamera3ProcessingChannel *)itr->stream->priv;
3752 channel->queueReprocMetadata(metadata_buf);
3753 break;
3754 }
3755 }
3756
Thierry Strudel54dc9782017-02-15 12:12:10 -08003757 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003758
3759 bool *enableZsl = nullptr;
3760 if (gExposeEnableZslKey) {
3761 enableZsl = &pendingRequest.enableZsl;
3762 }
3763
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003764 resultMetadata = translateFromHalMetadata(metadata,
3765 pendingRequest.timestamp, pendingRequest.request_id,
3766 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3767 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003768 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003769 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003770 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003771 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003773 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003774
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003775 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003776
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003777 if (pendingRequest.blob_request) {
3778 //Dump tuning metadata if enabled and available
3779 char prop[PROPERTY_VALUE_MAX];
3780 memset(prop, 0, sizeof(prop));
3781 property_get("persist.camera.dumpmetadata", prop, "0");
3782 int32_t enabled = atoi(prop);
3783 if (enabled && metadata->is_tuning_params_valid) {
3784 dumpMetadataToFile(metadata->tuning_params,
3785 mMetaFrameCount,
3786 enabled,
3787 "Snapshot",
3788 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003789 }
3790 }
3791
3792 if (!internalPproc) {
3793 LOGD("couldn't find need_metadata for this metadata");
3794 // Return metadata buffer
3795 if (free_and_bufdone_meta_buf) {
3796 mMetadataChannel->bufDone(metadata_buf);
3797 free(metadata_buf);
3798 }
3799 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003800
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003802 }
3803 }
3804
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003805 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3806
3807 // Try to send out capture result metadata.
3808 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 return;
3810
Thierry Strudel3d639192016-09-09 11:52:26 -07003811done_metadata:
3812 for (pendingRequestIterator i = mPendingRequestsList.begin();
3813 i != mPendingRequestsList.end() ;i++) {
3814 i->pipeline_depth++;
3815 }
3816 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3817 unblockRequestIfNecessary();
3818}
3819
3820/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003821 * FUNCTION   : handleDepthDataLocked
3822 *
3823 * DESCRIPTION: Handles incoming depth data
3824 *
3825 * PARAMETERS : @depthData : Depth data
3826 * @frameNumber: Frame number of the incoming depth data
3827 *
3828 * RETURN :
3829 *
3830 *==========================================================================*/
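// The loop below drains the depth channel oldest-first: buffers older than the
// incoming frame number are returned with CAMERA3_BUFFER_STATUS_ERROR plus an
// ERROR_BUFFER notify, the buffer matching frameNumber is populated with the depth
// payload, and anything newer is left queued for a later callback.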
3831void QCamera3HardwareInterface::handleDepthDataLocked(
3832 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3833 uint32_t currentFrameNumber;
3834 buffer_handle_t *depthBuffer;
3835
3836 if (nullptr == mDepthChannel) {
3837 LOGE("Depth channel not present!");
3838 return;
3839 }
3840
3841 camera3_stream_buffer_t resultBuffer =
3842 {.acquire_fence = -1,
3843 .release_fence = -1,
3844 .status = CAMERA3_BUFFER_STATUS_OK,
3845 .buffer = nullptr,
3846 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003847 do {
3848 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3849 if (nullptr == depthBuffer) {
3850 break;
3851 }
3852
Emilian Peev7650c122017-01-19 08:24:33 -08003853 resultBuffer.buffer = depthBuffer;
3854 if (currentFrameNumber == frameNumber) {
3855 int32_t rc = mDepthChannel->populateDepthData(depthData,
3856 frameNumber);
3857 if (NO_ERROR != rc) {
3858 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3859 } else {
3860 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3861 }
3862 } else if (currentFrameNumber > frameNumber) {
3863 break;
3864 } else {
3865 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3866 {{currentFrameNumber, mDepthChannel->getStream(),
3867 CAMERA3_MSG_ERROR_BUFFER}}};
3868 orchestrateNotify(&notify_msg);
3869
                3870            LOGE("Depth buffer for frame number: %d is missing, "
                3871                    "returning it back!", currentFrameNumber);
3872 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3873 }
3874 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003875 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003876 } while (currentFrameNumber < frameNumber);
3877}
3878
3879/*===========================================================================
3880 * FUNCTION : notifyErrorFoPendingDepthData
3881 *
3882 * DESCRIPTION: Returns error for any pending depth buffers
3883 *
3884 * PARAMETERS : depthCh - depth channel that needs to get flushed
3885 *
3886 * RETURN :
3887 *
3888 *==========================================================================*/
3889void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3890 QCamera3DepthChannel *depthCh) {
3891 uint32_t currentFrameNumber;
3892 buffer_handle_t *depthBuffer;
3893
3894 if (nullptr == depthCh) {
3895 return;
3896 }
3897
3898 camera3_notify_msg_t notify_msg =
3899 {.type = CAMERA3_MSG_ERROR,
3900 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3901 camera3_stream_buffer_t resultBuffer =
3902 {.acquire_fence = -1,
3903 .release_fence = -1,
3904 .buffer = nullptr,
3905 .stream = depthCh->getStream(),
3906 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003907
3908 while (nullptr !=
3909 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3910 depthCh->unmapBuffer(currentFrameNumber);
3911
3912 notify_msg.message.error.frame_number = currentFrameNumber;
3913 orchestrateNotify(&notify_msg);
3914
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003915 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003916 };
3917}
3918
3919/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003920 * FUNCTION : hdrPlusPerfLock
3921 *
3922 * DESCRIPTION: perf lock for HDR+ using custom intent
3923 *
3924 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3925 *
3926 * RETURN : None
3927 *
3928 *==========================================================================*/
3929void QCamera3HardwareInterface::hdrPlusPerfLock(
3930 mm_camera_super_buf_t *metadata_buf)
3931{
3932 if (NULL == metadata_buf) {
3933 LOGE("metadata_buf is NULL");
3934 return;
3935 }
3936 metadata_buffer_t *metadata =
3937 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3938 int32_t *p_frame_number_valid =
3939 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3940 uint32_t *p_frame_number =
3941 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3942
3943 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3944 LOGE("%s: Invalid metadata", __func__);
3945 return;
3946 }
3947
3948 //acquire perf lock for 5 sec after the last HDR frame is captured
3949 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3950 if ((p_frame_number != NULL) &&
3951 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003952 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003953 }
3954 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003955}
3956
3957/*===========================================================================
3958 * FUNCTION : handleInputBufferWithLock
3959 *
3960 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3961 *
3962 * PARAMETERS : @frame_number: frame number of the input buffer
3963 *
3964 * RETURN :
3965 *
3966 *==========================================================================*/
3967void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3968{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003969 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003970 pendingRequestIterator i = mPendingRequestsList.begin();
3971 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3972 i++;
3973 }
3974 if (i != mPendingRequestsList.end() && i->input_buffer) {
3975 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003976 CameraMetadata settings;
3977 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3978 if(i->settings) {
3979 settings = i->settings;
3980 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3981 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07003982 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003983 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07003984 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003985 } else {
3986 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07003987 }
3988
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003989 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3990 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3991 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07003992
3993 camera3_capture_result result;
3994 memset(&result, 0, sizeof(camera3_capture_result));
3995 result.frame_number = frame_number;
3996 result.result = i->settings;
3997 result.input_buffer = i->input_buffer;
3998 result.partial_result = PARTIAL_RESULT_COUNT;
3999
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004000 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004001 LOGD("Input request metadata and input buffer frame_number = %u",
4002 i->frame_number);
4003 i = erasePendingRequest(i);
4004 } else {
4005 LOGE("Could not find input request for frame number %d", frame_number);
4006 }
4007}
4008
4009/*===========================================================================
4010 * FUNCTION : handleBufferWithLock
4011 *
4012 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4013 *
4014 * PARAMETERS : @buffer: image buffer for the callback
4015 * @frame_number: frame number of the image buffer
4016 *
4017 * RETURN :
4018 *
4019 *==========================================================================*/
4020void QCamera3HardwareInterface::handleBufferWithLock(
4021 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4022{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004023 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004024
4025 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4026 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4027 }
4028
Thierry Strudel3d639192016-09-09 11:52:26 -07004029 /* Nothing to be done during error state */
4030 if ((ERROR == mState) || (DEINIT == mState)) {
4031 return;
4032 }
4033 if (mFlushPerf) {
4034 handleBuffersDuringFlushLock(buffer);
4035 return;
4036 }
4037 //not in flush
4038    // Look up the pending request for this frame number. For a reprocess request
4039    // (one carrying an input buffer), try to send out its result metadata now; the
4040    // output buffer itself is always handed to the output buffer dispatcher below.
4041 pendingRequestIterator i = mPendingRequestsList.begin();
4042 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4043 i++;
4044 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004045
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004046 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004047 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004048 // For a reprocessing request, try to send out result metadata.
4049 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004051 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004052
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004053 // Check if this frame was dropped.
4054 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4055 m != mPendingFrameDropList.end(); m++) {
4056 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4057 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4058 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4059 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4060 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4061 frame_number, streamID);
4062 m = mPendingFrameDropList.erase(m);
4063 break;
4064 }
4065 }
4066
4067 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4068 LOGH("result frame_number = %d, buffer = %p",
4069 frame_number, buffer->buffer);
4070
4071 mPendingBuffersMap.removeBuf(buffer->buffer);
4072 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4073
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004074 if (mPreviewStarted == false) {
4075 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4076 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004077 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4078
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004079 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4080 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4081 mPreviewStarted = true;
4082
4083 // Set power hint for preview
4084 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4085 }
4086 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004087}
4088
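/*===========================================================================
 * FUNCTION   : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the pending request matching frameNumber with the given
 *              result metadata and sends out, in order, all pending results
 *              that are now ready. Called with mMutex lock held.
 *
 * PARAMETERS : @frameNumber   : frame number of the result metadata
 *              @resultMetadata: result metadata buffer (may be nullptr for a
 *                               reprocess request, whose result is its settings)
 *
 * RETURN     :
 *
 *==========================================================================*/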
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004089void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004090 const camera_metadata_t *resultMetadata)
4091{
4092 // Find the pending request for this result metadata.
4093 auto requestIter = mPendingRequestsList.begin();
4094 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4095 requestIter++;
4096 }
4097
4098 if (requestIter == mPendingRequestsList.end()) {
4099 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4100 return;
4101 }
4102
4103 // Update the result metadata
4104 requestIter->resultMetadata = resultMetadata;
4105
4106 // Check what type of request this is.
4107 bool liveRequest = false;
4108 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004109 // HDR+ request doesn't have partial results.
4110 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004111 } else if (requestIter->input_buffer != nullptr) {
4112 // Reprocessing request result is the same as settings.
4113 requestIter->resultMetadata = requestIter->settings;
4114 // Reprocessing request doesn't have partial results.
4115 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4116 } else {
4117 liveRequest = true;
4118 requestIter->partial_result_cnt++;
4119 mPendingLiveRequest--;
4120
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004121 {
4122 Mutex::Autolock l(gHdrPlusClientLock);
4123 // For a live request, send the metadata to HDR+ client.
4124 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4125 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4126 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4127 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004128 }
4129 }
4130
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004131 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4132 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004133 bool readyToSend = true;
4134
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004135 // Iterate through the pending requests to send out result metadata that are ready. Also if
4136 // this result metadata belongs to a live request, notify errors for previous live requests
4137 // that don't have result metadata yet.
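    // For example, if live frames 10 and 11 are still missing their metadata when the
    // metadata for live frame 12 arrives, frames 10 and 11 are completed with
    // ERROR_RESULT and a dummy result carrying only the request id, so that delivery
    // stays in frame-number order.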
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004138 auto iter = mPendingRequestsList.begin();
4139 while (iter != mPendingRequestsList.end()) {
4140 // Check if current pending request is ready. If it's not ready, the following pending
4141 // requests are also not ready.
4142 if (readyToSend && iter->resultMetadata == nullptr) {
4143 readyToSend = false;
4144 }
4145
4146 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4147
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004148 camera3_capture_result_t result = {};
4149 result.frame_number = iter->frame_number;
4150 result.result = iter->resultMetadata;
4151 result.partial_result = iter->partial_result_cnt;
4152
4153 // If this pending buffer has result metadata, we may be able to send out shutter callback
4154 // and result metadata.
4155 if (iter->resultMetadata != nullptr) {
4156 if (!readyToSend) {
4157 // If any of the previous pending request is not ready, this pending request is
4158 // also not ready to send in order to keep shutter callbacks and result metadata
4159 // in order.
4160 iter++;
4161 continue;
4162 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004163 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4164 // If the result metadata belongs to a live request, notify errors for previous pending
4165 // live requests.
4166 mPendingLiveRequest--;
4167
4168 CameraMetadata dummyMetadata;
4169 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4170 result.result = dummyMetadata.release();
4171
4172 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004173
4174 // partial_result should be PARTIAL_RESULT_CNT in case of
4175 // ERROR_RESULT.
4176 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4177 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004178 } else {
4179 iter++;
4180 continue;
4181 }
4182
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004183 result.output_buffers = nullptr;
4184 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004185 orchestrateResult(&result);
4186
4187 // For reprocessing, result metadata is the same as settings so do not free it here to
4188 // avoid double free.
4189 if (result.result != iter->settings) {
4190 free_camera_metadata((camera_metadata_t *)result.result);
4191 }
4192 iter->resultMetadata = nullptr;
4193 iter = erasePendingRequest(iter);
4194 }
4195
4196 if (liveRequest) {
4197 for (auto &iter : mPendingRequestsList) {
4198 // Increment pipeline depth for the following pending requests.
4199 if (iter.frame_number > frameNumber) {
4200 iter.pipeline_depth++;
4201 }
4202 }
4203 }
4204
4205 unblockRequestIfNecessary();
4206}
4207
Thierry Strudel3d639192016-09-09 11:52:26 -07004208/*===========================================================================
4209 * FUNCTION : unblockRequestIfNecessary
4210 *
4211 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4212 * that mMutex is held when this function is called.
4213 *
4214 * PARAMETERS :
4215 *
4216 * RETURN :
4217 *
4218 *==========================================================================*/
4219void QCamera3HardwareInterface::unblockRequestIfNecessary()
4220{
4221 // Unblock process_capture_request
4222 pthread_cond_signal(&mRequestCond);
4223}
4224
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004225/*===========================================================================
4226 * FUNCTION : isHdrSnapshotRequest
4227 *
4228 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4229 *
4230 * PARAMETERS : camera3 request structure
4231 *
4232 * RETURN : boolean decision variable
4233 *
4234 *==========================================================================*/
4235bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4236{
4237 if (request == NULL) {
4238 LOGE("Invalid request handle");
4239 assert(0);
4240 return false;
4241 }
4242
4243 if (!mForceHdrSnapshot) {
4244 CameraMetadata frame_settings;
4245 frame_settings = request->settings;
4246
4247 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4248 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4249 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4250 return false;
4251 }
4252 } else {
4253 return false;
4254 }
4255
4256 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4257 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4258 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4259 return false;
4260 }
4261 } else {
4262 return false;
4263 }
4264 }
4265
4266 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4267 if (request->output_buffers[i].stream->format
4268 == HAL_PIXEL_FORMAT_BLOB) {
4269 return true;
4270 }
4271 }
4272
4273 return false;
4274}
4275/*===========================================================================
4276 * FUNCTION : orchestrateRequest
4277 *
4278 * DESCRIPTION: Orchestrates a capture request from camera service
4279 *
4280 * PARAMETERS :
4281 * @request : request from framework to process
4282 *
4283 * RETURN : Error status codes
4284 *
4285 *==========================================================================*/
4286int32_t QCamera3HardwareInterface::orchestrateRequest(
4287 camera3_capture_request_t *request)
4288{
4289
4290 uint32_t originalFrameNumber = request->frame_number;
4291 uint32_t originalOutputCount = request->num_output_buffers;
4292 const camera_metadata_t *original_settings = request->settings;
4293 List<InternalRequest> internallyRequestedStreams;
4294 List<InternalRequest> emptyInternalList;
4295
4296 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4297 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4298 uint32_t internalFrameNumber;
4299 CameraMetadata modified_meta;
4300
4301
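        /* Bracketing sequence implemented below:
         *   1. Lock AE at GB_HDR_HALF_STEP_EV, settle with a metering-only internal
         *      request, then run the original framework request at that exposure.
         *   2. Move AE compensation to 0, settle with a metering-only internal
         *      request, then capture an internal frame with metadata.
         *   3. Move AE compensation to GB_HDR_2X_STEP_EV and repeat step 2.
         * The original settings pointer is restored before returning. */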
4302 /* Add Blob channel to list of internally requested streams */
4303 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4304 if (request->output_buffers[i].stream->format
4305 == HAL_PIXEL_FORMAT_BLOB) {
4306 InternalRequest streamRequested;
4307 streamRequested.meteringOnly = 1;
4308 streamRequested.need_metadata = 0;
4309 streamRequested.stream = request->output_buffers[i].stream;
4310 internallyRequestedStreams.push_back(streamRequested);
4311 }
4312 }
4313 request->num_output_buffers = 0;
4314 auto itr = internallyRequestedStreams.begin();
4315
4316 /* Modify setting to set compensation */
4317 modified_meta = request->settings;
4318 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4319 uint8_t aeLock = 1;
4320 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4321 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4322 camera_metadata_t *modified_settings = modified_meta.release();
4323 request->settings = modified_settings;
4324
4325 /* Capture Settling & -2x frame */
4326 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4327 request->frame_number = internalFrameNumber;
4328 processCaptureRequest(request, internallyRequestedStreams);
4329
4330 request->num_output_buffers = originalOutputCount;
4331 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4332 request->frame_number = internalFrameNumber;
4333 processCaptureRequest(request, emptyInternalList);
4334 request->num_output_buffers = 0;
4335
4336 modified_meta = modified_settings;
4337 expCompensation = 0;
4338 aeLock = 1;
4339 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4340 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4341 modified_settings = modified_meta.release();
4342 request->settings = modified_settings;
4343
4344 /* Capture Settling & 0X frame */
4345
4346 itr = internallyRequestedStreams.begin();
4347 if (itr == internallyRequestedStreams.end()) {
4348 LOGE("Error Internally Requested Stream list is empty");
4349 assert(0);
4350 } else {
4351 itr->need_metadata = 0;
4352 itr->meteringOnly = 1;
4353 }
4354
4355 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4356 request->frame_number = internalFrameNumber;
4357 processCaptureRequest(request, internallyRequestedStreams);
4358
4359 itr = internallyRequestedStreams.begin();
4360 if (itr == internallyRequestedStreams.end()) {
4361 ALOGE("Error Internally Requested Stream list is empty");
4362 assert(0);
4363 } else {
4364 itr->need_metadata = 1;
4365 itr->meteringOnly = 0;
4366 }
4367
4368 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4369 request->frame_number = internalFrameNumber;
4370 processCaptureRequest(request, internallyRequestedStreams);
4371
4372 /* Capture 2X frame*/
4373 modified_meta = modified_settings;
4374 expCompensation = GB_HDR_2X_STEP_EV;
4375 aeLock = 1;
4376 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4377 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4378 modified_settings = modified_meta.release();
4379 request->settings = modified_settings;
4380
4381 itr = internallyRequestedStreams.begin();
4382 if (itr == internallyRequestedStreams.end()) {
4383 ALOGE("Error Internally Requested Stream list is empty");
4384 assert(0);
4385 } else {
4386 itr->need_metadata = 0;
4387 itr->meteringOnly = 1;
4388 }
4389 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4390 request->frame_number = internalFrameNumber;
4391 processCaptureRequest(request, internallyRequestedStreams);
4392
4393 itr = internallyRequestedStreams.begin();
4394 if (itr == internallyRequestedStreams.end()) {
4395 ALOGE("Error Internally Requested Stream list is empty");
4396 assert(0);
4397 } else {
4398 itr->need_metadata = 1;
4399 itr->meteringOnly = 0;
4400 }
4401
4402 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4403 request->frame_number = internalFrameNumber;
4404 processCaptureRequest(request, internallyRequestedStreams);
4405
4406
4407 /* Capture 2X on original streaming config*/
4408 internallyRequestedStreams.clear();
4409
4410 /* Restore original settings pointer */
4411 request->settings = original_settings;
4412 } else {
4413 uint32_t internalFrameNumber;
4414 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4415 request->frame_number = internalFrameNumber;
4416 return processCaptureRequest(request, internallyRequestedStreams);
4417 }
4418
4419 return NO_ERROR;
4420}
4421
4422/*===========================================================================
4423 * FUNCTION : orchestrateResult
4424 *
4425 * DESCRIPTION: Orchestrates a capture result to camera service
4426 *
4427 * PARAMETERS :
4428 *   @result    : capture result to send to the framework
4429 *
4430 * RETURN :
4431 *
4432 *==========================================================================*/
4433void QCamera3HardwareInterface::orchestrateResult(
4434 camera3_capture_result_t *result)
4435{
4436 uint32_t frameworkFrameNumber;
4437 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4438 frameworkFrameNumber);
4439 if (rc != NO_ERROR) {
4440 LOGE("Cannot find translated frameworkFrameNumber");
4441 assert(0);
4442 } else {
4443 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004444 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004445 } else {
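            // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite it to
            // the framework frame number before forwarding the result.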
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004446 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004447 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4448 camera_metadata_entry_t entry;
4449 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4450 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004451 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004452 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4453 if (ret != OK)
4454 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004455 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004456 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004457 result->frame_number = frameworkFrameNumber;
4458 mCallbackOps->process_capture_result(mCallbackOps, result);
4459 }
4460 }
4461}
4462
4463/*===========================================================================
4464 * FUNCTION : orchestrateNotify
4465 *
4466 * DESCRIPTION: Orchestrates a notify to camera service
4467 *
4468 * PARAMETERS :
4469 *   @notify_msg: notify message to send to the framework
4470 *
4471 * RETURN :
4472 *
4473 *==========================================================================*/
4474void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4475{
4476 uint32_t frameworkFrameNumber;
4477 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004478 int32_t rc = NO_ERROR;
4479
4480 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004481 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004482
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004483 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004484 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4485 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4486 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004487 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004488 LOGE("Cannot find translated frameworkFrameNumber");
4489 assert(0);
4490 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004491 }
4492 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004493
4494 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4495 LOGD("Internal Request drop the notifyCb");
4496 } else {
4497 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4498 mCallbackOps->notify(mCallbackOps, notify_msg);
4499 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004500}
4501
4502/*===========================================================================
4503 * FUNCTION : FrameNumberRegistry
4504 *
4505 * DESCRIPTION: Constructor
4506 *
4507 * PARAMETERS :
4508 *
4509 * RETURN :
4510 *
4511 *==========================================================================*/
4512FrameNumberRegistry::FrameNumberRegistry()
4513{
4514 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4515}
4516
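/* Usage sketch (illustrative only; see orchestrateRequest/orchestrateResult above):
 *
 *     uint32_t internalFrame;
 *     _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrame);
 *     request->frame_number = internalFrame;
 *     ...
 *     uint32_t fwkFrame;
 *     if ((_orchestrationDb.getFrameworkFrameNumber(internalFrame, fwkFrame) == NO_ERROR) &&
 *             (fwkFrame != EMPTY_FRAMEWORK_FRAME_NUMBER)) {
 *         result->frame_number = fwkFrame; // translate back before calling the framework
 *     }
 *
 * Entries created with generateStoreInternalFrameNumber() map to
 * EMPTY_FRAMEWORK_FRAME_NUMBER; their results and notifications are dropped.
 */
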
4517/*===========================================================================
4518 * FUNCTION : ~FrameNumberRegistry
4519 *
4520 * DESCRIPTION: Destructor
4521 *
4522 * PARAMETERS :
4523 *
4524 * RETURN :
4525 *
4526 *==========================================================================*/
4527FrameNumberRegistry::~FrameNumberRegistry()
4528{
4529}
4530
4531/*===========================================================================
4532 * FUNCTION   : purgeOldEntriesLocked
4533 *
4534 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4535 *
4536 * PARAMETERS :
4537 *
4538 * RETURN : NONE
4539 *
4540 *==========================================================================*/
4541void FrameNumberRegistry::purgeOldEntriesLocked()
4542{
4543 while (_register.begin() != _register.end()) {
4544 auto itr = _register.begin();
4545 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4546 _register.erase(itr);
4547 } else {
4548 return;
4549 }
4550 }
4551}
4552
4553/*===========================================================================
4554 * FUNCTION : allocStoreInternalFrameNumber
4555 *
4556 * DESCRIPTION: Method to note down a framework request and associate a new
4557 * internal request number against it
4558 *
4559 * PARAMETERS :
4560 * @fFrameNumber: Identifier given by framework
4561 * @internalFN : Output parameter which will have the newly generated internal
4562 * entry
4563 *
4564 * RETURN : Error code
4565 *
4566 *==========================================================================*/
4567int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4568 uint32_t &internalFrameNumber)
4569{
4570 Mutex::Autolock lock(mRegistryLock);
4571 internalFrameNumber = _nextFreeInternalNumber++;
4572 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4573 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4574 purgeOldEntriesLocked();
4575 return NO_ERROR;
4576}
4577
4578/*===========================================================================
4579 * FUNCTION : generateStoreInternalFrameNumber
4580 *
4581 * DESCRIPTION: Method to associate a new internal request number independent
4582 * of any associate with framework requests
4583 *              of any association with framework requests
4584 * PARAMETERS :
4585 * @internalFrame#: Output parameter which will have the newly generated internal
4586 * @internalFrame#: Output parameter which will have the newly generated
4587 *                  internal frame number
4588 * RETURN : Error code
4589 *
4590 *==========================================================================*/
4591int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4592{
4593 Mutex::Autolock lock(mRegistryLock);
4594 internalFrameNumber = _nextFreeInternalNumber++;
4595 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4596 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4597 purgeOldEntriesLocked();
4598 return NO_ERROR;
4599}
4600
4601/*===========================================================================
4602 * FUNCTION : getFrameworkFrameNumber
4603 *
4604 * DESCRIPTION: Method to query the framework framenumber given an internal #
4605 *
4606 * PARAMETERS :
4607 * @internalFrame#: Internal reference
4608 * @frameworkframenumber: Output parameter holding framework frame entry
4609 *
4610 * RETURN : Error code
4611 *
4612 *==========================================================================*/
4613int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4614 uint32_t &frameworkFrameNumber)
4615{
4616 Mutex::Autolock lock(mRegistryLock);
4617 auto itr = _register.find(internalFrameNumber);
4618 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004619 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004620 return -ENOENT;
4621 }
4622
4623 frameworkFrameNumber = itr->second;
4624 purgeOldEntriesLocked();
4625 return NO_ERROR;
4626}
Thierry Strudel3d639192016-09-09 11:52:26 -07004627
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004628status_t QCamera3HardwareInterface::fillPbStreamConfig(
4629 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4630 QCamera3Channel *channel, uint32_t streamIndex) {
4631 if (config == nullptr) {
4632 LOGE("%s: config is null", __FUNCTION__);
4633 return BAD_VALUE;
4634 }
4635
4636 if (channel == nullptr) {
4637 LOGE("%s: channel is null", __FUNCTION__);
4638 return BAD_VALUE;
4639 }
4640
4641 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4642 if (stream == nullptr) {
4643 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4644 return NAME_NOT_FOUND;
4645 }
4646
4647 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4648 if (streamInfo == nullptr) {
4649 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4650 return NAME_NOT_FOUND;
4651 }
4652
4653 config->id = pbStreamId;
4654 config->image.width = streamInfo->dim.width;
4655 config->image.height = streamInfo->dim.height;
4656 config->image.padding = 0;
4657 config->image.format = pbStreamFormat;
4658
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004659 uint32_t totalPlaneSize = 0;
4660
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004661 // Fill plane information.
4662 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4663 pbcamera::PlaneConfiguration plane;
4664 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4665 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4666 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004667
4668 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004669 }
4670
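    // Report any bytes in the frame beyond the summed plane sizes as padding.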
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004671 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004672 return OK;
4673}
4674
Thierry Strudel3d639192016-09-09 11:52:26 -07004675/*===========================================================================
4676 * FUNCTION : processCaptureRequest
4677 *
4678 * DESCRIPTION: process a capture request from camera service
4679 *
4680 * PARAMETERS :
4681 * @request : request from framework to process
4682 *
4683 * RETURN :
4684 *
4685 *==========================================================================*/
4686int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004687 camera3_capture_request_t *request,
4688 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004689{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004690 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004691 int rc = NO_ERROR;
4692 int32_t request_id;
4693 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004694 bool isVidBufRequested = false;
4695 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004696 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004697
4698 pthread_mutex_lock(&mMutex);
4699
4700 // Validate current state
4701 switch (mState) {
4702 case CONFIGURED:
4703 case STARTED:
4704 /* valid state */
4705 break;
4706
4707 case ERROR:
4708 pthread_mutex_unlock(&mMutex);
4709 handleCameraDeviceError();
4710 return -ENODEV;
4711
4712 default:
4713 LOGE("Invalid state %d", mState);
4714 pthread_mutex_unlock(&mMutex);
4715 return -ENODEV;
4716 }
4717
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004718 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004719 if (rc != NO_ERROR) {
4720 LOGE("incoming request is not valid");
4721 pthread_mutex_unlock(&mMutex);
4722 return rc;
4723 }
4724
4725 meta = request->settings;
4726
4727 // For first capture request, send capture intent, and
4728 // stream on all streams
4729 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004730 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004731 // send an unconfigure to the backend so that the isp
4732 // resources are deallocated
4733 if (!mFirstConfiguration) {
4734 cam_stream_size_info_t stream_config_info;
4735 int32_t hal_version = CAM_HAL_V3;
4736 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4737 stream_config_info.buffer_info.min_buffers =
4738 MIN_INFLIGHT_REQUESTS;
4739 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004740 m_bIs4KVideo ? 0 :
4741 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004742 clear_metadata_buffer(mParameters);
4743 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4744 CAM_INTF_PARM_HAL_VERSION, hal_version);
4745 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4746 CAM_INTF_META_STREAM_INFO, stream_config_info);
4747 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4748 mParameters);
4749 if (rc < 0) {
4750 LOGE("set_parms for unconfigure failed");
4751 pthread_mutex_unlock(&mMutex);
4752 return rc;
4753 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004754
Thierry Strudel3d639192016-09-09 11:52:26 -07004755 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004756 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004757 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004758 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004760 property_get("persist.camera.is_type", is_type_value, "4");
4761 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4762 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4763 property_get("persist.camera.is_type_preview", is_type_value, "4");
4764 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4765 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004766
4767 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4768 int32_t hal_version = CAM_HAL_V3;
4769 uint8_t captureIntent =
4770 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4771 mCaptureIntent = captureIntent;
4772 clear_metadata_buffer(mParameters);
4773 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4774 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4775 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004776 if (mFirstConfiguration) {
4777 // configure instant AEC
4778 // Instant AEC is a session based parameter and it is needed only
4779 // once per complete session after open camera.
4780 // i.e. This is set only once for the first capture request, after open camera.
4781 setInstantAEC(meta);
4782 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 uint8_t fwkVideoStabMode=0;
4784 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4785 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4786 }
4787
Xue Tuecac74e2017-04-17 13:58:15 -07004788        // If the EIS setprop is enabled, turn EIS on only for video/preview (and not when the AV timer is in use)
4789 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004790 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 int32_t vsMode;
4792 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4794 rc = BAD_VALUE;
4795 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004796 LOGD("setEis %d", setEis);
4797 bool eis3Supported = false;
4798 size_t count = IS_TYPE_MAX;
4799 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4800 for (size_t i = 0; i < count; i++) {
4801 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4802 eis3Supported = true;
4803 break;
4804 }
4805 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004806
4807        //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004808        //it could either be EIS 2.0 or EIS 3.0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4810 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004811 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4812 is_type = isTypePreview;
4813 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4814 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4815 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004816 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004817 } else {
4818 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004819 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004820 } else {
4821 is_type = IS_TYPE_NONE;
4822 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004823 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004824 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004825 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4826 }
4827 }
4828
4829 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4830 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4831
Thierry Strudel54dc9782017-02-15 12:12:10 -08004832 //Disable tintless only if the property is set to 0
4833 memset(prop, 0, sizeof(prop));
4834 property_get("persist.camera.tintless.enable", prop, "1");
4835 int32_t tintless_value = atoi(prop);
4836
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4838 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004839
Thierry Strudel3d639192016-09-09 11:52:26 -07004840 //Disable CDS for HFR mode or if DIS/EIS is on.
4841 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4842 //after every configure_stream
4843 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4844 (m_bIsVideo)) {
4845 int32_t cds = CAM_CDS_MODE_OFF;
4846 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4847 CAM_INTF_PARM_CDS_MODE, cds))
4848 LOGE("Failed to disable CDS for HFR mode");
4849
4850 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851
4852 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4853 uint8_t* use_av_timer = NULL;
4854
4855 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004856 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004857 use_av_timer = &m_debug_avtimer;
4858 }
4859 else{
4860 use_av_timer =
4861 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004862 if (use_av_timer) {
4863 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4864 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004865 }
4866
4867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4868 rc = BAD_VALUE;
4869 }
4870 }
4871
Thierry Strudel3d639192016-09-09 11:52:26 -07004872 setMobicat();
4873
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004874 uint8_t nrMode = 0;
4875 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4876 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4877 }
4878
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 /* Set fps and hfr mode while sending meta stream info so that sensor
4880 * can configure appropriate streaming mode */
4881 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4883 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004884 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4885 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886 if (rc == NO_ERROR) {
4887 int32_t max_fps =
4888 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004889 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004890 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4891 }
4892 /* For HFR, more buffers are dequeued upfront to improve the performance */
4893 if (mBatchSize) {
4894 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4895 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4896 }
4897 }
4898 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004899 LOGE("setHalFpsRange failed");
4900 }
4901 }
4902 if (meta.exists(ANDROID_CONTROL_MODE)) {
4903 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4904 rc = extractSceneMode(meta, metaMode, mParameters);
4905 if (rc != NO_ERROR) {
4906 LOGE("extractSceneMode failed");
4907 }
4908 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004909 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
Thierry Strudel04e026f2016-10-10 11:27:36 -07004911 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4912 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4913 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4914 rc = setVideoHdrMode(mParameters, vhdr);
4915 if (rc != NO_ERROR) {
4916 LOGE("setVideoHDR is failed");
4917 }
4918 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004919
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004920 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004921 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004922 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004923 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4924 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4925 sensorModeFullFov)) {
4926 rc = BAD_VALUE;
4927 }
4928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 //TODO: validate the arguments, HSV scenemode should have only the
4930 //advertised fps ranges
4931
4932    /*set the capture intent, hal version, tintless, stream info,
4933     *and DIS enable parameters to the backend*/
4934 LOGD("set_parms META_STREAM_INFO " );
4935 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004936 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4937 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004938 mStreamConfigInfo.type[i],
4939 mStreamConfigInfo.stream_sizes[i].width,
4940 mStreamConfigInfo.stream_sizes[i].height,
4941 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942 mStreamConfigInfo.format[i],
4943 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4947 mParameters);
4948 if (rc < 0) {
4949 LOGE("set_parms failed for hal version, stream info");
4950 }
4951
Chien-Yu Chenee335912017-02-09 17:53:20 -08004952 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4953 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004954 if (rc != NO_ERROR) {
4955 LOGE("Failed to get sensor output size");
4956 pthread_mutex_unlock(&mMutex);
4957 goto error_exit;
4958 }
4959
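        // Remap crop regions between the full active pixel array and the active
        // array of the sensor mode that was just queried.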
4960 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4961 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004962 mSensorModeInfo.active_array_size.width,
4963 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004964
4965 /* Set batchmode before initializing channel. Since registerBuffer
4966 * internally initializes some of the channels, better set batchmode
4967 * even before first register buffer */
4968 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4969 it != mStreamInfo.end(); it++) {
4970 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4971 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4972 && mBatchSize) {
4973 rc = channel->setBatchSize(mBatchSize);
4974 //Disable per frame map unmap for HFR/batchmode case
4975 rc |= channel->setPerFrameMapUnmap(false);
4976 if (NO_ERROR != rc) {
4977 LOGE("Channel init failed %d", rc);
4978 pthread_mutex_unlock(&mMutex);
4979 goto error_exit;
4980 }
4981 }
4982 }
4983
4984 //First initialize all streams
4985 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4986 it != mStreamInfo.end(); it++) {
4987 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004988
4989 /* Initial value of NR mode is needed before stream on */
4990 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4992 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004993 setEis) {
4994 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4995 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4996 is_type = mStreamConfigInfo.is_type[i];
4997 break;
4998 }
4999 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005001 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 rc = channel->initialize(IS_TYPE_NONE);
5003 }
5004 if (NO_ERROR != rc) {
5005 LOGE("Channel initialization failed %d", rc);
5006 pthread_mutex_unlock(&mMutex);
5007 goto error_exit;
5008 }
5009 }
5010
5011 if (mRawDumpChannel) {
5012 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5013 if (rc != NO_ERROR) {
5014 LOGE("Error: Raw Dump Channel init failed");
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005019 if (mHdrPlusRawSrcChannel) {
5020 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5021 if (rc != NO_ERROR) {
5022 LOGE("Error: HDR+ RAW Source Channel init failed");
5023 pthread_mutex_unlock(&mMutex);
5024 goto error_exit;
5025 }
5026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005027 if (mSupportChannel) {
5028 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5029 if (rc < 0) {
5030 LOGE("Support channel initialization failed");
5031 pthread_mutex_unlock(&mMutex);
5032 goto error_exit;
5033 }
5034 }
5035 if (mAnalysisChannel) {
5036 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5037 if (rc < 0) {
5038 LOGE("Analysis channel initialization failed");
5039 pthread_mutex_unlock(&mMutex);
5040 goto error_exit;
5041 }
5042 }
5043 if (mDummyBatchChannel) {
5044 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5045 if (rc < 0) {
5046 LOGE("mDummyBatchChannel setBatchSize failed");
5047 pthread_mutex_unlock(&mMutex);
5048 goto error_exit;
5049 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005050 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005051 if (rc < 0) {
5052 LOGE("mDummyBatchChannel initialization failed");
5053 pthread_mutex_unlock(&mMutex);
5054 goto error_exit;
5055 }
5056 }
5057
5058 // Set bundle info
5059 rc = setBundleInfo();
5060 if (rc < 0) {
5061 LOGE("setBundleInfo failed %d", rc);
5062 pthread_mutex_unlock(&mMutex);
5063 goto error_exit;
5064 }
5065
5066 //update settings from app here
5067 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5068 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5069 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5070 }
5071 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5072 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5073 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5074 }
5075 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5076 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5077 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5078
5079 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5080 (mLinkedCameraId != mCameraId) ) {
5081 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5082 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005083 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005084 goto error_exit;
5085 }
5086 }
5087
5088 // add bundle related cameras
5089 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5090 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005091 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5092 &m_pDualCamCmdPtr->bundle_info;
5093 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005094 if (mIsDeviceLinked)
5095 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5096 else
5097 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5098
5099 pthread_mutex_lock(&gCamLock);
5100
5101 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5102 LOGE("Dualcam: Invalid Session Id ");
5103 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005104 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005105 goto error_exit;
5106 }
5107
5108 if (mIsMainCamera == 1) {
5109 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5110 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005111 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005112 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 // related session id should be session id of linked session
5114 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5115 } else {
5116 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5117 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005118 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005119 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5121 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005122 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 pthread_mutex_unlock(&gCamLock);
5124
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005125 rc = mCameraHandle->ops->set_dual_cam_cmd(
5126 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005127 if (rc < 0) {
5128 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005129 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005130 goto error_exit;
5131 }
5132 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005133 goto no_error;
5134error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005135 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005136 return rc;
5137no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 mWokenUpByDaemon = false;
5139 mPendingLiveRequest = 0;
5140 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005141 }
5142
5143 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005144 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005145
5146 if (mFlushPerf) {
5147 //we cannot accept any requests during flush
5148 LOGE("process_capture_request cannot proceed during flush");
5149 pthread_mutex_unlock(&mMutex);
5150 return NO_ERROR; //should return an error
5151 }
5152
5153 if (meta.exists(ANDROID_REQUEST_ID)) {
5154 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5155 mCurrentRequestId = request_id;
5156 LOGD("Received request with id: %d", request_id);
5157 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5158        LOGE("Unable to find request id field, "
5159                "& no previous id available");
5160 pthread_mutex_unlock(&mMutex);
5161 return NAME_NOT_FOUND;
5162 } else {
5163 LOGD("Re-using old request id");
5164 request_id = mCurrentRequestId;
5165 }
5166
5167 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5168 request->num_output_buffers,
5169 request->input_buffer,
5170 frameNumber);
5171 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005172 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005174 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005175 uint32_t snapshotStreamId = 0;
5176 for (size_t i = 0; i < request->num_output_buffers; i++) {
5177 const camera3_stream_buffer_t& output = request->output_buffers[i];
5178 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5179
Emilian Peev7650c122017-01-19 08:24:33 -08005180 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5181 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005182 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005183 blob_request = 1;
5184 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5185 }
5186
5187 if (output.acquire_fence != -1) {
5188 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5189 close(output.acquire_fence);
5190 if (rc != OK) {
5191 LOGE("sync wait failed %d", rc);
5192 pthread_mutex_unlock(&mMutex);
5193 return rc;
5194 }
5195 }
5196
Emilian Peev0f3c3162017-03-15 12:57:46 +00005197 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5198 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005199 depthRequestPresent = true;
5200 continue;
5201 }
5202
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005203 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005205
5206 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5207 isVidBufRequested = true;
5208 }
5209 }
5210
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005211    //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5212 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5213 itr++) {
5214 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5215 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5216 channel->getStreamID(channel->getStreamTypeMask());
5217
5218 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5219 isVidBufRequested = true;
5220 }
5221 }
5222
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005224 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005225 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005226 }
5227 if (blob_request && mRawDumpChannel) {
5228 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005229 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005230 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005231 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 }
5233
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005234 {
5235 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5236 // Request a RAW buffer if
5237 // 1. mHdrPlusRawSrcChannel is valid.
5238 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5239 // 3. There is no pending HDR+ request.
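        // Illustrative sketch of the rate limiting above (the value used for
        // kHdrPlusRawPeriod here is an assumption, not the real constant):
        //   with kHdrPlusRawPeriod == 4, only frames 0, 4, 8, ... would request
        //   an extra RAW buffer from mHdrPlusRawSrcChannel, and even those are
        //   skipped while an HDR+ request is still pending.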
5240 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5241 mHdrPlusPendingRequests.size() == 0) {
5242 streamsArray.stream_request[streamsArray.num_streams].streamID =
5243 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5244 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5245 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005246 }
5247
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005248 //extract capture intent
5249 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5250 mCaptureIntent =
5251 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5252 }
5253
5254 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5255 mCacMode =
5256 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5257 }
5258
5259 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005260 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005261
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005262 {
5263 Mutex::Autolock l(gHdrPlusClientLock);
5264 // If this request has a still capture intent, try to submit an HDR+ request.
5265 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5266 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5267 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5268 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005269 }
5270
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005271 if (hdrPlusRequest) {
5272 // For a HDR+ request, just set the frame parameters.
5273 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5274 if (rc < 0) {
5275 LOGE("fail to set frame parameters");
5276 pthread_mutex_unlock(&mMutex);
5277 return rc;
5278 }
5279 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005280 /* Parse the settings:
5281 * - For every request in NORMAL MODE
5282 * - For every request in HFR mode during preview only case
5283 * - For first request of every batch in HFR mode during video
5284 * recording. In batchmode the same settings except frame number is
5285 * repeated in each request of the batch.
5286 */
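        /* Illustrative example (the batch size used here is an assumption, not
         * a fixed constant): with mBatchSize == 4 during HFR video recording,
         * setFrameParameters() runs only for the request that starts a new
         * batch (mToBeQueuedVidBufs == 0); the remaining three requests of that
         * batch reuse the same settings and only contribute their frame
         * numbers. */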
5287 if (!mBatchSize ||
5288 (mBatchSize && !isVidBufRequested) ||
5289 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 if (rc < 0) {
5292 LOGE("fail to set frame parameters");
5293 pthread_mutex_unlock(&mMutex);
5294 return rc;
5295 }
5296 }
 5297        /* For batchMode HFR, setFrameParameters is not called for every
 5298         * request; only the frame number of the latest request is parsed.
 5299         * Keep track of the first and last frame numbers in a batch so that
 5300         * metadata for the frame numbers of the batch can be duplicated in
 5301         * handleBatchMetadata */
5302 if (mBatchSize) {
5303 if (!mToBeQueuedVidBufs) {
5304 //start of the batch
5305 mFirstFrameNumberInBatch = request->frame_number;
5306 }
5307 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5308 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5309 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005310 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 return BAD_VALUE;
5312 }
5313 }
5314 if (mNeedSensorRestart) {
5315 /* Unlock the mutex as restartSensor waits on the channels to be
5316 * stopped, which in turn calls stream callback functions -
5317 * handleBufferWithLock and handleMetadataWithLock */
5318 pthread_mutex_unlock(&mMutex);
5319 rc = dynamicUpdateMetaStreamInfo();
5320 if (rc != NO_ERROR) {
5321 LOGE("Restarting the sensor failed");
5322 return BAD_VALUE;
5323 }
5324 mNeedSensorRestart = false;
5325 pthread_mutex_lock(&mMutex);
5326 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005327 if(mResetInstantAEC) {
5328 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5329 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5330 mResetInstantAEC = false;
5331 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005332 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 if (request->input_buffer->acquire_fence != -1) {
5334 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5335 close(request->input_buffer->acquire_fence);
5336 if (rc != OK) {
5337 LOGE("input buffer sync wait failed %d", rc);
5338 pthread_mutex_unlock(&mMutex);
5339 return rc;
5340 }
5341 }
5342 }
5343
5344 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5345 mLastCustIntentFrmNum = frameNumber;
5346 }
5347 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005348 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 pendingRequestIterator latestRequest;
5350 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005351 pendingRequest.num_buffers = depthRequestPresent ?
5352 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005353 pendingRequest.request_id = request_id;
5354 pendingRequest.blob_request = blob_request;
5355 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005356 if (request->input_buffer) {
5357 pendingRequest.input_buffer =
5358 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5359 *(pendingRequest.input_buffer) = *(request->input_buffer);
5360 pInputBuffer = pendingRequest.input_buffer;
5361 } else {
5362 pendingRequest.input_buffer = NULL;
5363 pInputBuffer = NULL;
5364 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005365 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005366
5367 pendingRequest.pipeline_depth = 0;
5368 pendingRequest.partial_result_cnt = 0;
5369 extractJpegMetadata(mCurJpegMeta, request);
5370 pendingRequest.jpegMetadata = mCurJpegMeta;
5371 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005372 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005373 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5374 mHybridAeEnable =
5375 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5376 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005377
5378 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5379 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005380 /* DevCamDebug metadata processCaptureRequest */
5381 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5382 mDevCamDebugMetaEnable =
5383 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5384 }
5385 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5386 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005387
5388 //extract CAC info
5389 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5390 mCacMode =
5391 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5392 }
5393 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005394 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005395
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005396 // extract enableZsl info
5397 if (gExposeEnableZslKey) {
5398 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5399 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5400 mZslEnabled = pendingRequest.enableZsl;
5401 } else {
5402 pendingRequest.enableZsl = mZslEnabled;
5403 }
5404 }
5405
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 PendingBuffersInRequest bufsForCurRequest;
5407 bufsForCurRequest.frame_number = frameNumber;
5408 // Mark current timestamp for the new request
5409 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005410 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005411
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005412 if (hdrPlusRequest) {
5413 // Save settings for this request.
5414 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5415 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5416
5417 // Add to pending HDR+ request queue.
5418 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5419 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5420
5421 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5422 }
5423
Thierry Strudel3d639192016-09-09 11:52:26 -07005424 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005425 if ((request->output_buffers[i].stream->data_space ==
5426 HAL_DATASPACE_DEPTH) &&
5427 (HAL_PIXEL_FORMAT_BLOB ==
5428 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005429 continue;
5430 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005431 RequestedBufferInfo requestedBuf;
5432 memset(&requestedBuf, 0, sizeof(requestedBuf));
5433 requestedBuf.stream = request->output_buffers[i].stream;
5434 requestedBuf.buffer = NULL;
5435 pendingRequest.buffers.push_back(requestedBuf);
5436
 5437            // Add the buffer handle to the pending buffers list
5438 PendingBufferInfo bufferInfo;
5439 bufferInfo.buffer = request->output_buffers[i].buffer;
5440 bufferInfo.stream = request->output_buffers[i].stream;
5441 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5442 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5443 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5444 frameNumber, bufferInfo.buffer,
5445 channel->getStreamTypeMask(), bufferInfo.stream->format);
5446 }
5447 // Add this request packet into mPendingBuffersMap
5448 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5449 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5450 mPendingBuffersMap.get_num_overall_buffers());
5451
5452 latestRequest = mPendingRequestsList.insert(
5453 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005454
5455 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5456 // for the frame number.
5457 mShutterDispatcher.expectShutter(frameNumber);
5458 for (size_t i = 0; i < request->num_output_buffers; i++) {
5459 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5460 }
5461
Thierry Strudel3d639192016-09-09 11:52:26 -07005462 if(mFlush) {
5463 LOGI("mFlush is true");
5464 pthread_mutex_unlock(&mMutex);
5465 return NO_ERROR;
5466 }
5467
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005468 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5469 // channel.
5470 if (!hdrPlusRequest) {
5471 int indexUsed;
5472 // Notify metadata channel we receive a request
5473 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005474
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005475 if(request->input_buffer != NULL){
5476 LOGD("Input request, frame_number %d", frameNumber);
5477 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5478 if (NO_ERROR != rc) {
5479 LOGE("fail to set reproc parameters");
5480 pthread_mutex_unlock(&mMutex);
5481 return rc;
5482 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005483 }
5484
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005485 // Call request on other streams
5486 uint32_t streams_need_metadata = 0;
5487 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5488 for (size_t i = 0; i < request->num_output_buffers; i++) {
5489 const camera3_stream_buffer_t& output = request->output_buffers[i];
5490 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5491
5492 if (channel == NULL) {
5493 LOGW("invalid channel pointer for stream");
5494 continue;
5495 }
5496
5497 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5498 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5499 output.buffer, request->input_buffer, frameNumber);
5500 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005501 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005502 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5503 if (rc < 0) {
5504 LOGE("Fail to request on picture channel");
5505 pthread_mutex_unlock(&mMutex);
5506 return rc;
5507 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005508 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005509 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5510 assert(NULL != mDepthChannel);
5511 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005512
Emilian Peev7650c122017-01-19 08:24:33 -08005513 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5514 if (rc < 0) {
5515 LOGE("Fail to map on depth buffer");
5516 pthread_mutex_unlock(&mMutex);
5517 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005518 }
Emilian Peev7650c122017-01-19 08:24:33 -08005519 } else {
5520 LOGD("snapshot request with buffer %p, frame_number %d",
5521 output.buffer, frameNumber);
5522 if (!request->settings) {
5523 rc = channel->request(output.buffer, frameNumber,
5524 NULL, mPrevParameters, indexUsed);
5525 } else {
5526 rc = channel->request(output.buffer, frameNumber,
5527 NULL, mParameters, indexUsed);
5528 }
5529 if (rc < 0) {
5530 LOGE("Fail to request on picture channel");
5531 pthread_mutex_unlock(&mMutex);
5532 return rc;
5533 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005534
Emilian Peev7650c122017-01-19 08:24:33 -08005535 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5536 uint32_t j = 0;
5537 for (j = 0; j < streamsArray.num_streams; j++) {
5538 if (streamsArray.stream_request[j].streamID == streamId) {
5539 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5540 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5541 else
5542 streamsArray.stream_request[j].buf_index = indexUsed;
5543 break;
5544 }
5545 }
5546 if (j == streamsArray.num_streams) {
5547 LOGE("Did not find matching stream to update index");
5548 assert(0);
5549 }
5550
5551 pendingBufferIter->need_metadata = true;
5552 streams_need_metadata++;
5553 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005554 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5556 bool needMetadata = false;
5557 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5558 rc = yuvChannel->request(output.buffer, frameNumber,
5559 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5560 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005561 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005562 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 pthread_mutex_unlock(&mMutex);
5564 return rc;
5565 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005566
5567 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5568 uint32_t j = 0;
5569 for (j = 0; j < streamsArray.num_streams; j++) {
5570 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005571 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5572 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5573 else
5574 streamsArray.stream_request[j].buf_index = indexUsed;
5575 break;
5576 }
5577 }
5578 if (j == streamsArray.num_streams) {
5579 LOGE("Did not find matching stream to update index");
5580 assert(0);
5581 }
5582
5583 pendingBufferIter->need_metadata = needMetadata;
5584 if (needMetadata)
5585 streams_need_metadata += 1;
5586 LOGD("calling YUV channel request, need_metadata is %d",
5587 needMetadata);
5588 } else {
5589 LOGD("request with buffer %p, frame_number %d",
5590 output.buffer, frameNumber);
5591
5592 rc = channel->request(output.buffer, frameNumber, indexUsed);
5593
5594 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5595 uint32_t j = 0;
5596 for (j = 0; j < streamsArray.num_streams; j++) {
5597 if (streamsArray.stream_request[j].streamID == streamId) {
5598 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5599 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5600 else
5601 streamsArray.stream_request[j].buf_index = indexUsed;
5602 break;
5603 }
5604 }
5605 if (j == streamsArray.num_streams) {
5606 LOGE("Did not find matching stream to update index");
5607 assert(0);
5608 }
5609
5610 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5611 && mBatchSize) {
5612 mToBeQueuedVidBufs++;
5613 if (mToBeQueuedVidBufs == mBatchSize) {
5614 channel->queueBatchBuf();
5615 }
5616 }
5617 if (rc < 0) {
5618 LOGE("request failed");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
5621 }
5622 }
5623 pendingBufferIter++;
5624 }
5625
5626 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5627 itr++) {
5628 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5629
5630 if (channel == NULL) {
5631 LOGE("invalid channel pointer for stream");
5632 assert(0);
5633 return BAD_VALUE;
5634 }
5635
5636 InternalRequest requestedStream;
5637 requestedStream = (*itr);
5638
5639
5640 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5641 LOGD("snapshot request internally input buffer %p, frame_number %d",
5642 request->input_buffer, frameNumber);
5643 if(request->input_buffer != NULL){
5644 rc = channel->request(NULL, frameNumber,
5645 pInputBuffer, &mReprocMeta, indexUsed, true,
5646 requestedStream.meteringOnly);
5647 if (rc < 0) {
5648 LOGE("Fail to request on picture channel");
5649 pthread_mutex_unlock(&mMutex);
5650 return rc;
5651 }
5652 } else {
5653 LOGD("snapshot request with frame_number %d", frameNumber);
5654 if (!request->settings) {
5655 rc = channel->request(NULL, frameNumber,
5656 NULL, mPrevParameters, indexUsed, true,
5657 requestedStream.meteringOnly);
5658 } else {
5659 rc = channel->request(NULL, frameNumber,
5660 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5661 }
5662 if (rc < 0) {
5663 LOGE("Fail to request on picture channel");
5664 pthread_mutex_unlock(&mMutex);
5665 return rc;
5666 }
5667
5668 if ((*itr).meteringOnly != 1) {
5669 requestedStream.need_metadata = 1;
5670 streams_need_metadata++;
5671 }
5672 }
5673
5674 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5675 uint32_t j = 0;
5676 for (j = 0; j < streamsArray.num_streams; j++) {
5677 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005678 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5679 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5680 else
5681 streamsArray.stream_request[j].buf_index = indexUsed;
5682 break;
5683 }
5684 }
5685 if (j == streamsArray.num_streams) {
5686 LOGE("Did not find matching stream to update index");
5687 assert(0);
5688 }
5689
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005690 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005691 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005692 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005693 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005694 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005695 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005696 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005697
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005698        // If two streams have need_metadata set to true, fail the request unless
 5699        // we copy or reference-count the metadata buffer
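        // Example of a request that would be rejected here (the stream mix is
        // illustrative): a single capture request containing both a JPEG (BLOB)
        // output buffer and a callback YUV buffer that needs offline
        // reprocessing would require two HAL metadata buffers, so it fails with
        // -EINVAL below.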
5700 if (streams_need_metadata > 1) {
 5701            LOGE("not supporting request in which two streams require"
 5702                    " 2 HAL metadata for reprocessing");
5703 pthread_mutex_unlock(&mMutex);
5704 return -EINVAL;
5705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005706
Emilian Peev7650c122017-01-19 08:24:33 -08005707 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5708 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5709 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5710 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5711 pthread_mutex_unlock(&mMutex);
5712 return BAD_VALUE;
5713 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005714 if (request->input_buffer == NULL) {
5715 /* Set the parameters to backend:
5716 * - For every request in NORMAL MODE
5717 * - For every request in HFR mode during preview only case
5718 * - Once every batch in HFR mode during video recording
5719 */
5720 if (!mBatchSize ||
5721 (mBatchSize && !isVidBufRequested) ||
5722 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5723 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5724 mBatchSize, isVidBufRequested,
5725 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005726
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005727 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5728 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5729 uint32_t m = 0;
5730 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5731 if (streamsArray.stream_request[k].streamID ==
5732 mBatchedStreamsArray.stream_request[m].streamID)
5733 break;
5734 }
5735 if (m == mBatchedStreamsArray.num_streams) {
5736 mBatchedStreamsArray.stream_request\
5737 [mBatchedStreamsArray.num_streams].streamID =
5738 streamsArray.stream_request[k].streamID;
5739 mBatchedStreamsArray.stream_request\
5740 [mBatchedStreamsArray.num_streams].buf_index =
5741 streamsArray.stream_request[k].buf_index;
5742 mBatchedStreamsArray.num_streams =
5743 mBatchedStreamsArray.num_streams + 1;
5744 }
5745 }
5746 streamsArray = mBatchedStreamsArray;
5747 }
5748 /* Update stream id of all the requested buffers */
5749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5750 streamsArray)) {
5751 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005752 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 return BAD_VALUE;
5754 }
5755
5756 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5757 mParameters);
5758 if (rc < 0) {
5759 LOGE("set_parms failed");
5760 }
 5761            /* reset to zero because the batch is queued */
5762 mToBeQueuedVidBufs = 0;
5763 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5764 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5765 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005766 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5767 uint32_t m = 0;
5768 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5769 if (streamsArray.stream_request[k].streamID ==
5770 mBatchedStreamsArray.stream_request[m].streamID)
5771 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005772 }
5773 if (m == mBatchedStreamsArray.num_streams) {
5774 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5775 streamID = streamsArray.stream_request[k].streamID;
5776 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5777 buf_index = streamsArray.stream_request[k].buf_index;
5778 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5779 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005780 }
5781 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005783
5784 // Start all streams after the first setting is sent, so that the
5785 // setting can be applied sooner: (0 + apply_delay)th frame.
5786 if (mState == CONFIGURED && mChannelHandle) {
5787 //Then start them.
5788 LOGH("Start META Channel");
5789 rc = mMetadataChannel->start();
5790 if (rc < 0) {
5791 LOGE("META channel start failed");
5792 pthread_mutex_unlock(&mMutex);
5793 return rc;
5794 }
5795
5796 if (mAnalysisChannel) {
5797 rc = mAnalysisChannel->start();
5798 if (rc < 0) {
5799 LOGE("Analysis channel start failed");
5800 mMetadataChannel->stop();
5801 pthread_mutex_unlock(&mMutex);
5802 return rc;
5803 }
5804 }
5805
5806 if (mSupportChannel) {
5807 rc = mSupportChannel->start();
5808 if (rc < 0) {
5809 LOGE("Support channel start failed");
5810 mMetadataChannel->stop();
5811 /* Although support and analysis are mutually exclusive today
 5812                           adding it in any case for future-proofing */
5813 if (mAnalysisChannel) {
5814 mAnalysisChannel->stop();
5815 }
5816 pthread_mutex_unlock(&mMutex);
5817 return rc;
5818 }
5819 }
5820 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5821 it != mStreamInfo.end(); it++) {
5822 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5823 LOGH("Start Processing Channel mask=%d",
5824 channel->getStreamTypeMask());
5825 rc = channel->start();
5826 if (rc < 0) {
5827 LOGE("channel start failed");
5828 pthread_mutex_unlock(&mMutex);
5829 return rc;
5830 }
5831 }
5832
5833 if (mRawDumpChannel) {
5834 LOGD("Starting raw dump stream");
5835 rc = mRawDumpChannel->start();
5836 if (rc != NO_ERROR) {
5837 LOGE("Error Starting Raw Dump Channel");
5838 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5839 it != mStreamInfo.end(); it++) {
5840 QCamera3Channel *channel =
5841 (QCamera3Channel *)(*it)->stream->priv;
5842 LOGH("Stopping Processing Channel mask=%d",
5843 channel->getStreamTypeMask());
5844 channel->stop();
5845 }
5846 if (mSupportChannel)
5847 mSupportChannel->stop();
5848 if (mAnalysisChannel) {
5849 mAnalysisChannel->stop();
5850 }
5851 mMetadataChannel->stop();
5852 pthread_mutex_unlock(&mMutex);
5853 return rc;
5854 }
5855 }
5856
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005857 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005858 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005859 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005860 if (rc != NO_ERROR) {
5861 LOGE("start_channel failed %d", rc);
5862 pthread_mutex_unlock(&mMutex);
5863 return rc;
5864 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005865
5866 {
5867 // Configure Easel for stream on.
5868 Mutex::Autolock l(gHdrPlusClientLock);
5869 if (EaselManagerClientOpened) {
5870 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005871 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5872 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005873 if (rc != OK) {
5874 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5875 mCameraId, mSensorModeInfo.op_pixel_clk);
5876 pthread_mutex_unlock(&mMutex);
5877 return rc;
5878 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005879 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005880 }
5881 }
5882
5883 // Start sensor streaming.
5884 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5885 mChannelHandle);
5886 if (rc != NO_ERROR) {
 5887                LOGE("start_sensor_streaming failed %d", rc);
5888 pthread_mutex_unlock(&mMutex);
5889 return rc;
5890 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005891 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005892 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005893 }
5894
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005895 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5896 {
5897 Mutex::Autolock l(gHdrPlusClientLock);
5898 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5899 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5900 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5901 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5902 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5903 rc = enableHdrPlusModeLocked();
5904 if (rc != OK) {
5905 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5906 pthread_mutex_unlock(&mMutex);
5907 return rc;
5908 }
5909
5910 mFirstPreviewIntentSeen = true;
5911 }
5912 }
5913
Thierry Strudel3d639192016-09-09 11:52:26 -07005914 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5915
5916 mState = STARTED;
5917 // Added a timed condition wait
5918 struct timespec ts;
5919 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005920 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005921 if (rc < 0) {
5922 isValidTimeout = 0;
 5923        LOGE("Error reading the monotonic clock!!");
5924 }
5925 else {
 5926        // Set a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005927 int64_t timeout = 5;
5928 {
5929 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5930 // If there is a pending HDR+ request, the following requests may be blocked until the
5931 // HDR+ request is done. So allow a longer timeout.
5932 if (mHdrPlusPendingRequests.size() > 0) {
5933 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5934 }
5935 }
5936 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005937 }
5938 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005939 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005940 (mState != ERROR) && (mState != DEINIT)) {
5941 if (!isValidTimeout) {
5942 LOGD("Blocking on conditional wait");
5943 pthread_cond_wait(&mRequestCond, &mMutex);
5944 }
5945 else {
5946 LOGD("Blocking on timed conditional wait");
5947 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5948 if (rc == ETIMEDOUT) {
5949 rc = -ENODEV;
5950 LOGE("Unblocked on timeout!!!!");
5951 break;
5952 }
5953 }
5954 LOGD("Unblocked");
5955 if (mWokenUpByDaemon) {
5956 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005957 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005958 break;
5959 }
5960 }
5961 pthread_mutex_unlock(&mMutex);
5962
5963 return rc;
5964}
5965
5966/*===========================================================================
5967 * FUNCTION : dump
5968 *
 5969 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and the
 5970 *              pending frame drop list) to the given file descriptor
 5971 *
 5972 * PARAMETERS :
 5973 *   @fd      : file descriptor to write the dump to
 5974 * RETURN     : None
5975 *==========================================================================*/
5976void QCamera3HardwareInterface::dump(int fd)
5977{
5978 pthread_mutex_lock(&mMutex);
5979 dprintf(fd, "\n Camera HAL3 information Begin \n");
5980
5981 dprintf(fd, "\nNumber of pending requests: %zu \n",
5982 mPendingRequestsList.size());
5983 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5984 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5985 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5986 for(pendingRequestIterator i = mPendingRequestsList.begin();
5987 i != mPendingRequestsList.end(); i++) {
5988 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5989 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5990 i->input_buffer);
5991 }
5992 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5993 mPendingBuffersMap.get_num_overall_buffers());
5994 dprintf(fd, "-------+------------------\n");
5995 dprintf(fd, " Frame | Stream type mask \n");
5996 dprintf(fd, "-------+------------------\n");
5997 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5998 for(auto &j : req.mPendingBufferList) {
5999 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6000 dprintf(fd, " %5d | %11d \n",
6001 req.frame_number, channel->getStreamTypeMask());
6002 }
6003 }
6004 dprintf(fd, "-------+------------------\n");
6005
6006 dprintf(fd, "\nPending frame drop list: %zu\n",
6007 mPendingFrameDropList.size());
6008 dprintf(fd, "-------+-----------\n");
6009 dprintf(fd, " Frame | Stream ID \n");
6010 dprintf(fd, "-------+-----------\n");
6011 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6012 i != mPendingFrameDropList.end(); i++) {
6013 dprintf(fd, " %5d | %9d \n",
6014 i->frame_number, i->stream_ID);
6015 }
6016 dprintf(fd, "-------+-----------\n");
6017
6018 dprintf(fd, "\n Camera HAL3 information End \n");
6019
6020 /* use dumpsys media.camera as trigger to send update debug level event */
6021 mUpdateDebugLevel = true;
6022 pthread_mutex_unlock(&mMutex);
6023 return;
6024}
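// Usage note (sketch): this entry point is normally reached through the
// camera3 device dump hook, for example when a developer runs
//
//     adb shell dumpsys media.camera
//
// which is also why the code above uses that dump as the trigger to refresh
// the debug log level.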
6025
6026/*===========================================================================
6027 * FUNCTION : flush
6028 *
6029 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6030 * conditionally restarts channels
6031 *
6032 * PARAMETERS :
6033 * @ restartChannels: re-start all channels
6034 *
6035 *
6036 * RETURN :
6037 * 0 on success
6038 * Error code on failure
6039 *==========================================================================*/
6040int QCamera3HardwareInterface::flush(bool restartChannels)
6041{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006042 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006043 int32_t rc = NO_ERROR;
6044
6045 LOGD("Unblocking Process Capture Request");
6046 pthread_mutex_lock(&mMutex);
6047 mFlush = true;
6048 pthread_mutex_unlock(&mMutex);
6049
6050 rc = stopAllChannels();
6051 // unlink of dualcam
6052 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006053 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6054 &m_pDualCamCmdPtr->bundle_info;
6055 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006056 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6057 pthread_mutex_lock(&gCamLock);
6058
6059 if (mIsMainCamera == 1) {
6060 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6061 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006062 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006063 // related session id should be session id of linked session
6064 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6065 } else {
6066 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6067 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006068 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006069 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6070 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006071 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006072 pthread_mutex_unlock(&gCamLock);
6073
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006074 rc = mCameraHandle->ops->set_dual_cam_cmd(
6075 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 if (rc < 0) {
6077 LOGE("Dualcam: Unlink failed, but still proceed to close");
6078 }
6079 }
6080
6081 if (rc < 0) {
6082 LOGE("stopAllChannels failed");
6083 return rc;
6084 }
6085 if (mChannelHandle) {
6086 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6087 mChannelHandle);
6088 }
6089
6090 // Reset bundle info
6091 rc = setBundleInfo();
6092 if (rc < 0) {
6093 LOGE("setBundleInfo failed %d", rc);
6094 return rc;
6095 }
6096
6097 // Mutex Lock
6098 pthread_mutex_lock(&mMutex);
6099
6100 // Unblock process_capture_request
6101 mPendingLiveRequest = 0;
6102 pthread_cond_signal(&mRequestCond);
6103
6104 rc = notifyErrorForPendingRequests();
6105 if (rc < 0) {
6106 LOGE("notifyErrorForPendingRequests failed");
6107 pthread_mutex_unlock(&mMutex);
6108 return rc;
6109 }
6110
6111 mFlush = false;
6112
6113 // Start the Streams/Channels
6114 if (restartChannels) {
6115 rc = startAllChannels();
6116 if (rc < 0) {
6117 LOGE("startAllChannels failed");
6118 pthread_mutex_unlock(&mMutex);
6119 return rc;
6120 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006121 if (mChannelHandle) {
6122 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006123 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006124 if (rc < 0) {
6125 LOGE("start_channel failed");
6126 pthread_mutex_unlock(&mMutex);
6127 return rc;
6128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006129 }
6130 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006131 pthread_mutex_unlock(&mMutex);
6132
6133 return 0;
6134}
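// Usage sketch: flush(false /* restartChannels */) is what the internal error
// path uses (see handleCameraDeviceError()) so that nothing is restarted after
// draining, while a caller that wants streaming to continue afterwards would
// pass restartChannels = true.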
6135
6136/*===========================================================================
6137 * FUNCTION : flushPerf
6138 *
6139 * DESCRIPTION: This is the performance optimization version of flush that does
 6140 *              not use stream off, but rather flushes the system
6141 *
6142 * PARAMETERS :
6143 *
6144 *
6145 * RETURN : 0 : success
6146 * -EINVAL: input is malformed (device is not valid)
6147 * -ENODEV: if the device has encountered a serious error
6148 *==========================================================================*/
6149int QCamera3HardwareInterface::flushPerf()
6150{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006151 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 int32_t rc = 0;
6153 struct timespec timeout;
6154 bool timed_wait = false;
6155
6156 pthread_mutex_lock(&mMutex);
6157 mFlushPerf = true;
6158 mPendingBuffersMap.numPendingBufsAtFlush =
6159 mPendingBuffersMap.get_num_overall_buffers();
6160 LOGD("Calling flush. Wait for %d buffers to return",
6161 mPendingBuffersMap.numPendingBufsAtFlush);
6162
6163 /* send the flush event to the backend */
6164 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6165 if (rc < 0) {
6166 LOGE("Error in flush: IOCTL failure");
6167 mFlushPerf = false;
6168 pthread_mutex_unlock(&mMutex);
6169 return -ENODEV;
6170 }
6171
6172 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6173 LOGD("No pending buffers in HAL, return flush");
6174 mFlushPerf = false;
6175 pthread_mutex_unlock(&mMutex);
6176 return rc;
6177 }
6178
6179 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006180 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006181 if (rc < 0) {
 6182        LOGE("Error reading the monotonic clock, cannot use timed wait");
6183 } else {
6184 timeout.tv_sec += FLUSH_TIMEOUT;
6185 timed_wait = true;
6186 }
6187
6188 //Block on conditional variable
6189 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6190 LOGD("Waiting on mBuffersCond");
6191 if (!timed_wait) {
6192 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6193 if (rc != 0) {
6194 LOGE("pthread_cond_wait failed due to rc = %s",
6195 strerror(rc));
6196 break;
6197 }
6198 } else {
6199 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6200 if (rc != 0) {
6201 LOGE("pthread_cond_timedwait failed due to rc = %s",
6202 strerror(rc));
6203 break;
6204 }
6205 }
6206 }
6207 if (rc != 0) {
6208 mFlushPerf = false;
6209 pthread_mutex_unlock(&mMutex);
6210 return -ENODEV;
6211 }
6212
6213 LOGD("Received buffers, now safe to return them");
6214
6215 //make sure the channels handle flush
6216 //currently only required for the picture channel to release snapshot resources
6217 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6218 it != mStreamInfo.end(); it++) {
6219 QCamera3Channel *channel = (*it)->channel;
6220 if (channel) {
6221 rc = channel->flush();
6222 if (rc) {
6223 LOGE("Flushing the channels failed with error %d", rc);
 6224                // Even though the channel flush failed, we need to continue and
 6225                // return the buffers we have to the framework; however, the
 6226                // return value will be an error.
6227 rc = -ENODEV;
6228 }
6229 }
6230 }
6231
6232 /* notify the frameworks and send errored results */
6233 rc = notifyErrorForPendingRequests();
6234 if (rc < 0) {
6235 LOGE("notifyErrorForPendingRequests failed");
6236 pthread_mutex_unlock(&mMutex);
6237 return rc;
6238 }
6239
6240 //unblock process_capture_request
6241 mPendingLiveRequest = 0;
6242 unblockRequestIfNecessary();
6243
6244 mFlushPerf = false;
6245 pthread_mutex_unlock(&mMutex);
6246 LOGD ("Flush Operation complete. rc = %d", rc);
6247 return rc;
6248}
6249
6250/*===========================================================================
6251 * FUNCTION : handleCameraDeviceError
6252 *
 6253 * DESCRIPTION: This function performs an internal flush, notifies the error
 6254 *              to the framework and updates the state variable.
6255 *
6256 * PARAMETERS : None
6257 *
6258 * RETURN : NO_ERROR on Success
6259 * Error code on failure
6260 *==========================================================================*/
6261int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6262{
6263 int32_t rc = NO_ERROR;
6264
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006265 {
6266 Mutex::Autolock lock(mFlushLock);
6267 pthread_mutex_lock(&mMutex);
6268 if (mState != ERROR) {
6269 //if mState != ERROR, nothing to be done
6270 pthread_mutex_unlock(&mMutex);
6271 return NO_ERROR;
6272 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006273 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006274
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006275 rc = flush(false /* restart channels */);
6276 if (NO_ERROR != rc) {
6277 LOGE("internal flush to handle mState = ERROR failed");
6278 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006279
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006280 pthread_mutex_lock(&mMutex);
6281 mState = DEINIT;
6282 pthread_mutex_unlock(&mMutex);
6283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006284
6285 camera3_notify_msg_t notify_msg;
6286 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6287 notify_msg.type = CAMERA3_MSG_ERROR;
6288 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6289 notify_msg.message.error.error_stream = NULL;
6290 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006291 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006292
6293 return rc;
6294}
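// Usage sketch (the caller shown is illustrative): the device error path first
// marks the session as failed and then invokes this handler, e.g.
//
//     pthread_mutex_lock(&mMutex);
//     mState = ERROR;
//     pthread_mutex_unlock(&mMutex);
//     handleCameraDeviceError();
//
// If mState is not ERROR when this runs, the early return above makes it a
// no-op.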
6295
6296/*===========================================================================
6297 * FUNCTION : captureResultCb
6298 *
6299 * DESCRIPTION: Callback handler for all capture result
 6300 * DESCRIPTION: Callback handler for all capture results
 6301 *              (stream buffers, as well as metadata)
6302 * PARAMETERS :
6303 * @metadata : metadata information
6304 * @buffer : actual gralloc buffer to be returned to frameworks.
6305 * NULL if metadata.
6306 *
6307 * RETURN : NONE
6308 *==========================================================================*/
6309void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6310 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6311{
6312 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006313 pthread_mutex_lock(&mMutex);
6314 uint8_t batchSize = mBatchSize;
6315 pthread_mutex_unlock(&mMutex);
6316 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 handleBatchMetadata(metadata_buf,
6318 true /* free_and_bufdone_meta_buf */);
6319 } else { /* mBatchSize = 0 */
6320 hdrPlusPerfLock(metadata_buf);
6321 pthread_mutex_lock(&mMutex);
6322 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006323 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006324 true /* last urgent frame of batch metadata */,
6325 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006326 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006327 pthread_mutex_unlock(&mMutex);
6328 }
6329 } else if (isInputBuffer) {
6330 pthread_mutex_lock(&mMutex);
6331 handleInputBufferWithLock(frame_number);
6332 pthread_mutex_unlock(&mMutex);
6333 } else {
6334 pthread_mutex_lock(&mMutex);
6335 handleBufferWithLock(buffer, frame_number);
6336 pthread_mutex_unlock(&mMutex);
6337 }
6338 return;
6339}
6340
6341/*===========================================================================
6342 * FUNCTION : getReprocessibleOutputStreamId
6343 *
6344 * DESCRIPTION: Get source output stream id for the input reprocess stream
6345 * based on size and format, which would be the largest
6346 * output stream if an input stream exists.
6347 *
6348 * PARAMETERS :
6349 * @id : return the stream id if found
6350 *
6351 * RETURN : int32_t type of status
6352 * NO_ERROR -- success
 6353 *              non-zero failure code
6354 *==========================================================================*/
6355int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6356{
 6357    /* Check if there is any output or bidirectional stream with the same size
 6358       and format as the input stream, and return that stream */
6359 if ((mInputStreamInfo.dim.width > 0) &&
6360 (mInputStreamInfo.dim.height > 0)) {
6361 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6362 it != mStreamInfo.end(); it++) {
6363
6364 camera3_stream_t *stream = (*it)->stream;
6365 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6366 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6367 (stream->format == mInputStreamInfo.format)) {
6368 // Usage flag for an input stream and the source output stream
6369 // may be different.
6370 LOGD("Found reprocessible output stream! %p", *it);
6371 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6372 stream->usage, mInputStreamInfo.usage);
6373
6374 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6375 if (channel != NULL && channel->mStreams[0]) {
6376 id = channel->mStreams[0]->getMyServerID();
6377 return NO_ERROR;
6378 }
6379 }
6380 }
6381 } else {
6382 LOGD("No input stream, so no reprocessible output stream");
6383 }
6384 return NAME_NOT_FOUND;
6385}
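// Usage sketch (illustrative):
//
//     uint32_t reprocStreamId = 0;
//     if (getReprocessibleOutputStreamId(reprocStreamId) == NO_ERROR) {
//         // reprocStreamId now holds the server stream ID of the output
//         // stream whose size/format matches the configured input stream.
//     }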
6386
6387/*===========================================================================
6388 * FUNCTION : lookupFwkName
 6389 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6390 *              make sure the parameter is correctly propagated
6391 * make sure the parameter is correctly propogated
6392 *
6393 * PARAMETERS :
6394 * @arr : map between the two enums
6395 * @len : len of the map
6396 * @hal_name : name of the hal_parm to map
6397 *
6398 * RETURN : int type of status
6399 * fwk_name -- success
 6400 *              non-zero failure code
6401 *==========================================================================*/
6402template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6403 size_t len, halType hal_name)
6404{
6405
6406 for (size_t i = 0; i < len; i++) {
6407 if (arr[i].hal_name == hal_name) {
6408 return arr[i].fwk_name;
6409 }
6410 }
6411
 6412    /* Not being able to find a matching framework type is not necessarily
 6413     * an error case. This happens when mm-camera supports more attributes
 6414     * than the framework does */
6415 LOGH("Cannot find matching framework type");
6416 return NAME_NOT_FOUND;
6417}
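// Usage sketch (the map table name and METADATA_MAP_SIZE macro below are
// assumptions about how the translation tables in this HAL are declared, not
// guaranteed identifiers):
//
//     int val = lookupFwkName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
//     if (val != NAME_NOT_FOUND) {
//         uint8_t fwkEffectMode = (uint8_t)val;
//         camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwkEffectMode, 1);
//     }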
6418
6419/*===========================================================================
6420 * FUNCTION : lookupHalName
6421 *
 6422 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6423 *              make sure the parameter is correctly propagated
6424 *
6425 * PARAMETERS :
6426 * @arr : map between the two enums
6427 * @len : len of the map
 6428 *   @fwk_name  : name of the framework parameter to map
6429 *
6430 * RETURN : int32_t type of status
6431 * hal_name -- success
 6432 *              non-zero failure code
6433 *==========================================================================*/
6434template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6435 size_t len, fwkType fwk_name)
6436{
6437 for (size_t i = 0; i < len; i++) {
6438 if (arr[i].fwk_name == fwk_name) {
6439 return arr[i].hal_name;
6440 }
6441 }
6442
6443 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6444 return NAME_NOT_FOUND;
6445}
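// Usage sketch (the table name and backend parameter ID are illustrative
// assumptions, not guaranteed identifiers):
//
//     int32_t val = lookupHalName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), fwkEffectMode);
//     if (val != NAME_NOT_FOUND) {
//         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_EFFECT,
//                 (uint32_t)val);
//     }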
6446
6447/*===========================================================================
6448 * FUNCTION : lookupProp
6449 *
6450 * DESCRIPTION: lookup a value by its name
6451 *
6452 * PARAMETERS :
6453 * @arr : map between the two enums
6454 * @len : size of the map
6455 * @name : name to be looked up
6456 *
6457 * RETURN : Value if found
6458 * CAM_CDS_MODE_MAX if not found
6459 *==========================================================================*/
6460template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6461 size_t len, const char *name)
6462{
6463 if (name) {
6464 for (size_t i = 0; i < len; i++) {
6465 if (!strcmp(arr[i].desc, name)) {
6466 return arr[i].val;
6467 }
6468 }
6469 }
6470 return CAM_CDS_MODE_MAX;
6471}
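// Usage sketch (the property name and CDS_MAP table are assumptions used only
// for illustration):
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds =
//             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (cds != CAM_CDS_MODE_MAX) {
//         // apply the parsed CDS mode
//     }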
6472
6473/*===========================================================================
 6474 * FUNCTION   : translateFromHalMetadata
 6475 * DESCRIPTION: Translate metadata received from the HAL/backend into a
 6476 *              camera_metadata_t result consumable by the framework
6477 * PARAMETERS :
6478 * @metadata : metadata information from callback
6479 * @timestamp: metadata buffer timestamp
6480 * @request_id: request id
6481 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006482 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006483 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6484 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006485 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006486 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6487 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006488 *
6489 * RETURN : camera_metadata_t*
6490 * metadata in a format specified by fwk
6491 *==========================================================================*/
6492camera_metadata_t*
6493QCamera3HardwareInterface::translateFromHalMetadata(
6494 metadata_buffer_t *metadata,
6495 nsecs_t timestamp,
6496 int32_t request_id,
6497 const CameraMetadata& jpegMetadata,
6498 uint8_t pipeline_depth,
6499 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006500 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006501 /* DevCamDebug metadata translateFromHalMetadata argument */
6502 uint8_t DevCamDebug_meta_enable,
6503 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006504 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006505 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006506 bool lastMetadataInBatch,
6507 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006508{
6509 CameraMetadata camMetadata;
6510 camera_metadata_t *resultMetadata;
6511
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006512 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006513 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6514 * Timestamp is needed because it's used for shutter notify calculation.
6515 * */
6516 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6517 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006518 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006519 }
6520
Thierry Strudel3d639192016-09-09 11:52:26 -07006521 if (jpegMetadata.entryCount())
6522 camMetadata.append(jpegMetadata);
6523
6524 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6525 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6526 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6527 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006528 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006529 if (mBatchSize == 0) {
6530 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6531 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006533
Samuel Ha68ba5172016-12-15 18:41:12 -08006534 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6535    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6536 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6537 // DevCamDebug metadata translateFromHalMetadata AF
6538 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6539 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6540 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6541 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6542 }
6543 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6544 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6545 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6546 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6547 }
6548 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6549 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6550 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6551 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6552 }
6553 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6554 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6555 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6556 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6557 }
6558 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6559 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6560 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6561 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6562 }
6563 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6564 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6565 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6566 *DevCamDebug_af_monitor_pdaf_target_pos;
6567 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6568 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6569 }
6570 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6571 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6572 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6573 *DevCamDebug_af_monitor_pdaf_confidence;
6574 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6575 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6576 }
6577 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6578 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6579 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6580 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6581 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6582 }
6583 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6584 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6585 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6586 *DevCamDebug_af_monitor_tof_target_pos;
6587 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6588 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6589 }
6590 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6591 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6592 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6593 *DevCamDebug_af_monitor_tof_confidence;
6594 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6595 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6596 }
6597 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6598 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6599 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6600 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6601 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6602 }
6603 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6604 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6605 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6606 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6607 &fwk_DevCamDebug_af_monitor_type_select, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6610 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6611 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6612 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6613 &fwk_DevCamDebug_af_monitor_refocus, 1);
6614 }
6615 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6616 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6617 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6618 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6619 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6622 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6623 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6624 *DevCamDebug_af_search_pdaf_target_pos;
6625 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6626 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6627 }
6628 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6629 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6630 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6631 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6632 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6633 }
6634 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6635 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6636 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6637 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6638 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6641 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6642 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6643 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6644 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6647 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6648 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6649 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6650 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6653 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6654 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6655 *DevCamDebug_af_search_tof_target_pos;
6656 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6657 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6660 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6661 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6662 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6663 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6664 }
6665 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6666 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6667 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6668 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6669 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6672 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6673 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6675 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6678 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6679 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6680 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6681 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6682 }
6683 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6684 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6685 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6686 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6687 &fwk_DevCamDebug_af_search_type_select, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6690 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6691 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6692 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6693 &fwk_DevCamDebug_af_search_next_pos, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6696 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6697 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6698 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6699 &fwk_DevCamDebug_af_search_target_pos, 1);
6700 }
6701 // DevCamDebug metadata translateFromHalMetadata AEC
6702 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6703 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6704 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6705 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6708 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6709 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6710 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6711 }
6712 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6713 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6714 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6715 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6718 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6719 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6720 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6723 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6724 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6725 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6726 }
6727 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6728 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6729 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6730 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6733 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6734 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6735 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6736 }
6737 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6738 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6739 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6740 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6741 }
Samuel Ha34229982017-02-17 13:51:11 -08006742 // DevCamDebug metadata translateFromHalMetadata zzHDR
6743 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6744 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6745 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6746 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6749 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006750 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006751 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6752 }
6753 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6754 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6755 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6756 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6759 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006760 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006761 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6762 }
6763 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6764 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6765 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6766 *DevCamDebug_aec_hdr_sensitivity_ratio;
6767 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6768 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6769 }
6770 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6771 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6772 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6773 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6774 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6775 }
6776 // DevCamDebug metadata translateFromHalMetadata ADRC
6777 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6778 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6779 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6780 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6781 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6782 }
6783 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6784 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6785 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6786 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6787 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6788 }
6789 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6790 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6791 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6792 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6793 }
6794 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6795 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6796 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6797 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6798 }
6799 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6800 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6801 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6802 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6803 }
6804 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6805 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6806 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6807 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6808 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006809 // DevCamDebug metadata translateFromHalMetadata AWB
6810 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6811 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6812 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6813 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6814 }
6815 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6816 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6817 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6818 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6819 }
6820 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6821 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6822 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6823 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6826 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6827 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6828 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6831 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6832 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6833 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6834 }
6835 }
6836 // atrace_end(ATRACE_TAG_ALWAYS);
6837
Thierry Strudel3d639192016-09-09 11:52:26 -07006838 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6839 int64_t fwk_frame_number = *frame_number;
6840 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6841 }
6842
6843 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6844 int32_t fps_range[2];
6845 fps_range[0] = (int32_t)float_range->min_fps;
6846 fps_range[1] = (int32_t)float_range->max_fps;
6847 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6848 fps_range, 2);
6849 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6850 fps_range[0], fps_range[1]);
6851 }
6852
6853 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6854 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6855 }
6856
6857 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6858 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6859 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6860 *sceneMode);
6861 if (NAME_NOT_FOUND != val) {
6862 uint8_t fwkSceneMode = (uint8_t)val;
6863 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6864 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6865 fwkSceneMode);
6866 }
6867 }
6868
6869 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6870 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6871 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6872 }
6873
6874 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6875 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6876 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6877 }
6878
6879 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6880 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6881 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6882 }
6883
6884 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6885 CAM_INTF_META_EDGE_MODE, metadata) {
6886 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6887 }
6888
6889 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6890 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6891 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6892 }
6893
6894 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6895 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6896 }
6897
6898 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6899 if (0 <= *flashState) {
6900 uint8_t fwk_flashState = (uint8_t) *flashState;
6901 if (!gCamCapability[mCameraId]->flash_available) {
6902 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6903 }
6904 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6905 }
6906 }
6907
6908 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6909 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6910 if (NAME_NOT_FOUND != val) {
6911 uint8_t fwk_flashMode = (uint8_t)val;
6912 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6913 }
6914 }
6915
6916 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6917 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6918 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6919 }
6920
6921 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6922 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6923 }
6924
6925 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6926 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6927 }
6928
6929 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6930 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6931 }
6932
6933 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6934 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6935 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6936 }
6937
6938 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6939 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6940 LOGD("fwk_videoStab = %d", fwk_videoStab);
6941 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6942 } else {
6943 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6944 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6945 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6946 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006947 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006948 }
6949
6950 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6951 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6952 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6953 }
6954
6955 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6956 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6957 }
6958
Thierry Strudel3d639192016-09-09 11:52:26 -07006959 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6960 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006961 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006962
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006963 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6964 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006965
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006966 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006967 blackLevelAppliedPattern->cam_black_level[0],
6968 blackLevelAppliedPattern->cam_black_level[1],
6969 blackLevelAppliedPattern->cam_black_level[2],
6970 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006971 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6972 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006973
6974#ifndef USE_HAL_3_3
6975 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306976 // Convert the internal 14-bit depth to the sensor's 10-bit raw depth
Zhijun Heb753c672016-06-15 14:50:48 -07006977 // space (divide by 2^(14-10) = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05306978 fwk_blackLevelInd[0] /= 16.0;
6979 fwk_blackLevelInd[1] /= 16.0;
6980 fwk_blackLevelInd[2] /= 16.0;
6981 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006982 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6983 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006984#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006985 }
6986
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006987#ifndef USE_HAL_3_3
6988 // Fixed whitelevel is used by ISP/Sensor
6989 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6990 &gCamCapability[mCameraId]->white_level, 1);
6991#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006992
6993 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6994 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6995 int32_t scalerCropRegion[4];
6996 scalerCropRegion[0] = hScalerCropRegion->left;
6997 scalerCropRegion[1] = hScalerCropRegion->top;
6998 scalerCropRegion[2] = hScalerCropRegion->width;
6999 scalerCropRegion[3] = hScalerCropRegion->height;
7000
7001 // Adjust crop region from sensor output coordinate system to active
7002 // array coordinate system.
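        // (the HAL reports the crop in sensor-output coordinates, which may differ from the
        // active-array coordinates the framework expects when the sensor output is binned or
        // cropped)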
7003 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7004 scalerCropRegion[2], scalerCropRegion[3]);
7005
7006 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7007 }
7008
7009 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7010 LOGD("sensorExpTime = %lld", *sensorExpTime);
7011 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7012 }
7013
7014 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7015 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7016 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7017 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7018 }
7019
7020 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7021 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7022 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7023 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7024 sensorRollingShutterSkew, 1);
7025 }
7026
7027 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7028 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7029 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7030
7031 //calculate the noise profile based on sensitivity
7032 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7033 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
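        // ANDROID_SENSOR_NOISE_PROFILE models sensor noise as N(x) = sqrt(S * x + O) for a
        // normalized pixel value x; the same (S, O) pair is replicated for every color channel
        // in the interleaved array below.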
7034 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7035 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7036 noise_profile[i] = noise_profile_S;
7037 noise_profile[i+1] = noise_profile_O;
7038 }
7039 LOGD("noise model entry (S, O) is (%f, %f)",
7040 noise_profile_S, noise_profile_O);
7041 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7042 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7043 }
7044
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007045#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007046 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007047 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007048 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007049 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007050 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7051 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7052 }
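    // A value of 100 for ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST means no additional digital
    // boost; the ISP sensitivity reported by the HAL is scaled by the post-stats sensitivity
    // factor before being published.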
7053 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007054#endif
7055
Thierry Strudel3d639192016-09-09 11:52:26 -07007056 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7057 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7058 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7059 }
7060
7061 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7062 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7063 *faceDetectMode);
7064 if (NAME_NOT_FOUND != val) {
7065 uint8_t fwk_faceDetectMode = (uint8_t)val;
7066 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7067
7068 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7069 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7070 CAM_INTF_META_FACE_DETECTION, metadata) {
7071 uint8_t numFaces = MIN(
7072 faceDetectionInfo->num_faces_detected, MAX_ROI);
7073 int32_t faceIds[MAX_ROI];
7074 uint8_t faceScores[MAX_ROI];
7075 int32_t faceRectangles[MAX_ROI * 4];
7076 int32_t faceLandmarks[MAX_ROI * 6];
7077 size_t j = 0, k = 0;
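                    // Each face contributes 4 ints to faceRectangles (left, top, right, bottom)
                    // and TOTAL_LANDMARK_INDICES ints to faceLandmarks (left-eye, right-eye and
                    // mouth center coordinates).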
7078
7079 for (size_t i = 0; i < numFaces; i++) {
7080 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7081 // Adjust crop region from sensor output coordinate system to active
7082 // array coordinate system.
7083 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7084 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7085 rect.width, rect.height);
7086
7087 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7088 faceRectangles+j, -1);
7089
Jason Lee8ce36fa2017-04-19 19:40:37 -07007090 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7091 "bottom-right (%d, %d)",
7092 faceDetectionInfo->frame_id, i,
7093 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7094 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7095
Thierry Strudel3d639192016-09-09 11:52:26 -07007096 j+= 4;
7097 }
7098 if (numFaces <= 0) {
7099 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7100 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7101 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7102 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7103 }
7104
7105 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7106 numFaces);
7107 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7108 faceRectangles, numFaces * 4U);
7109 if (fwk_faceDetectMode ==
7110 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7111 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7112 CAM_INTF_META_FACE_LANDMARK, metadata) {
7113
7114 for (size_t i = 0; i < numFaces; i++) {
7115 // Map the co-ordinate sensor output coordinate system to active
7116 // array coordinate system.
7117 mCropRegionMapper.toActiveArray(
7118 landmarks->face_landmarks[i].left_eye_center.x,
7119 landmarks->face_landmarks[i].left_eye_center.y);
7120 mCropRegionMapper.toActiveArray(
7121 landmarks->face_landmarks[i].right_eye_center.x,
7122 landmarks->face_landmarks[i].right_eye_center.y);
7123 mCropRegionMapper.toActiveArray(
7124 landmarks->face_landmarks[i].mouth_center.x,
7125 landmarks->face_landmarks[i].mouth_center.y);
7126
7127 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007128
7129 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7130 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7131 faceDetectionInfo->frame_id, i,
7132 faceLandmarks[k + LEFT_EYE_X],
7133 faceLandmarks[k + LEFT_EYE_Y],
7134 faceLandmarks[k + RIGHT_EYE_X],
7135 faceLandmarks[k + RIGHT_EYE_Y],
7136 faceLandmarks[k + MOUTH_X],
7137 faceLandmarks[k + MOUTH_Y]);
7138
Thierry Strudel04e026f2016-10-10 11:27:36 -07007139 k+= TOTAL_LANDMARK_INDICES;
7140 }
7141 } else {
7142 for (size_t i = 0; i < numFaces; i++) {
7143 setInvalidLandmarks(faceLandmarks+k);
7144 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007145 }
7146 }
7147
Jason Lee49619db2017-04-13 12:07:22 -07007148 for (size_t i = 0; i < numFaces; i++) {
7149 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7150
7151 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7152 faceDetectionInfo->frame_id, i, faceIds[i]);
7153 }
7154
Thierry Strudel3d639192016-09-09 11:52:26 -07007155 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7156 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7157 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007158 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007159 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7160 CAM_INTF_META_FACE_BLINK, metadata) {
7161 uint8_t detected[MAX_ROI];
7162 uint8_t degree[MAX_ROI * 2];
7163 for (size_t i = 0; i < numFaces; i++) {
7164 detected[i] = blinks->blink[i].blink_detected;
7165 degree[2 * i] = blinks->blink[i].left_blink;
7166 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007167
Jason Lee49619db2017-04-13 12:07:22 -07007168 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7169 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7170 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7171 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007172 }
7173 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7174 detected, numFaces);
7175 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7176 degree, numFaces * 2);
7177 }
7178 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7179 CAM_INTF_META_FACE_SMILE, metadata) {
7180 uint8_t degree[MAX_ROI];
7181 uint8_t confidence[MAX_ROI];
7182 for (size_t i = 0; i < numFaces; i++) {
7183 degree[i] = smiles->smile[i].smile_degree;
7184 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007185
Jason Lee49619db2017-04-13 12:07:22 -07007186 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7187 "smile_degree=%d, smile_score=%d",
7188 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007189 }
7190 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7191 degree, numFaces);
7192 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7193 confidence, numFaces);
7194 }
7195 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7196 CAM_INTF_META_FACE_GAZE, metadata) {
7197 int8_t angle[MAX_ROI];
7198 int32_t direction[MAX_ROI * 3];
7199 int8_t degree[MAX_ROI * 2];
7200 for (size_t i = 0; i < numFaces; i++) {
7201 angle[i] = gazes->gaze[i].gaze_angle;
7202 direction[3 * i] = gazes->gaze[i].updown_dir;
7203 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7204 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7205 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7206 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007207
7208 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7209 "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7210 "left_right_gaze=%d, top_bottom_gaze=%d",
7211 faceDetectionInfo->frame_id, i, angle[i],
7212 direction[3 * i], direction[3 * i + 1],
7213 direction[3 * i + 2],
7214 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007215 }
7216 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7217 (uint8_t *)angle, numFaces);
7218 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7219 direction, numFaces * 3);
7220 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7221 (uint8_t *)degree, numFaces * 2);
7222 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007223 }
7224 }
7225 }
7226 }
7227
7228 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7229 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007230 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007231 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007232 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007233
Shuzhen Wang14415f52016-11-16 18:26:18 -08007234 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7235 histogramBins = *histBins;
7236 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7237 }
7238
7239 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007240 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7241 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007242 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007243
7244 switch (stats_data->type) {
7245 case CAM_HISTOGRAM_TYPE_BAYER:
7246 switch (stats_data->bayer_stats.data_type) {
7247 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007248 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7249 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007250 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007251 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7252 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007253 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007254 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7255 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007256 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007257 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007258 case CAM_STATS_CHANNEL_R:
7259 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007260 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7261 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007262 }
7263 break;
7264 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007265 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007266 break;
7267 }
7268
Shuzhen Wang14415f52016-11-16 18:26:18 -08007269 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007270 }
7271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007272 }
7273
7274 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7275 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7276 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7277 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7278 }
7279
7280 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7281 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7282 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7283 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7284 }
7285
7286 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7287 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7288 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7289 CAM_MAX_SHADING_MAP_HEIGHT);
7290 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7291 CAM_MAX_SHADING_MAP_WIDTH);
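        // The lens shading map carries four gain samples (one per Bayer channel) for each grid
        // point, hence the 4U * map_width * map_height element count below.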
7292 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7293 lensShadingMap->lens_shading, 4U * map_width * map_height);
7294 }
7295
7296 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7297 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7298 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7299 }
7300
7301 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7302 //Populate CAM_INTF_META_TONEMAP_CURVES
7303 /* ch 0 = G, ch 1 = B, ch 2 = R */
7304 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7305 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7306 tonemap->tonemap_points_cnt,
7307 CAM_MAX_TONEMAP_CURVE_SIZE);
7308 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7309 }
7310
7311 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7312 &tonemap->curves[0].tonemap_points[0][0],
7313 tonemap->tonemap_points_cnt * 2);
7314
7315 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7316 &tonemap->curves[1].tonemap_points[0][0],
7317 tonemap->tonemap_points_cnt * 2);
7318
7319 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7320 &tonemap->curves[2].tonemap_points[0][0],
7321 tonemap->tonemap_points_cnt * 2);
7322 }
7323
7324 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7325 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7326 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7327 CC_GAIN_MAX);
7328 }
7329
7330 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7331 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7332 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7333 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7334 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7335 }
7336
7337 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7338 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7339 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7340 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7341 toneCurve->tonemap_points_cnt,
7342 CAM_MAX_TONEMAP_CURVE_SIZE);
7343 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7344 }
7345 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7346 (float*)toneCurve->curve.tonemap_points,
7347 toneCurve->tonemap_points_cnt * 2);
7348 }
7349
7350 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7351 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7352 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7353 predColorCorrectionGains->gains, 4);
7354 }
7355
7356 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7357 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7358 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7359 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7360 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7361 }
7362
7363 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7364 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7365 }
7366
7367 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7368 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7369 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7370 }
7371
7372 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7373 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7374 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7375 }
7376
7377 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7378 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7379 *effectMode);
7380 if (NAME_NOT_FOUND != val) {
7381 uint8_t fwk_effectMode = (uint8_t)val;
7382 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7383 }
7384 }
7385
7386 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7387 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7388 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7389 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7390 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7391 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7392 }
7393 int32_t fwk_testPatternData[4];
7394 fwk_testPatternData[0] = testPatternData->r;
7395 fwk_testPatternData[3] = testPatternData->b;
7396 switch (gCamCapability[mCameraId]->color_arrangement) {
7397 case CAM_FILTER_ARRANGEMENT_RGGB:
7398 case CAM_FILTER_ARRANGEMENT_GRBG:
7399 fwk_testPatternData[1] = testPatternData->gr;
7400 fwk_testPatternData[2] = testPatternData->gb;
7401 break;
7402 case CAM_FILTER_ARRANGEMENT_GBRG:
7403 case CAM_FILTER_ARRANGEMENT_BGGR:
7404 fwk_testPatternData[2] = testPatternData->gr;
7405 fwk_testPatternData[1] = testPatternData->gb;
7406 break;
7407 default:
7408 LOGE("color arrangement %d is not supported",
7409 gCamCapability[mCameraId]->color_arrangement);
7410 break;
7411 }
7412 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7413 }
7414
7415 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7416 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7417 }
7418
7419 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7420 String8 str((const char *)gps_methods);
7421 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7422 }
7423
7424 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7425 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7426 }
7427
7428 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7429 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7430 }
7431
7432 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7433 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7434 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7435 }
7436
7437 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7438 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7439 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7440 }
7441
7442 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7443 int32_t fwk_thumb_size[2];
7444 fwk_thumb_size[0] = thumb_size->width;
7445 fwk_thumb_size[1] = thumb_size->height;
7446 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7447 }
7448
7449 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7450 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7451 privateData,
7452 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7453 }
7454
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007455 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007456 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007457 meteringMode, 1);
7458 }
7459
Thierry Strudel54dc9782017-02-15 12:12:10 -08007460 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7461 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7462 LOGD("hdr_scene_data: %d %f\n",
7463 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7464 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7465 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7466 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7467 &isHdr, 1);
7468 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7469 &isHdrConfidence, 1);
7470 }
7471
7472
7473
Thierry Strudel3d639192016-09-09 11:52:26 -07007474 if (metadata->is_tuning_params_valid) {
7475 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7476 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7477 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
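        // The tuning blob is packed as six uint32_t header fields (data version plus the sensor,
        // VFE, CPP, CAC and mod3 payload sizes) followed by the corresponding variable-length
        // payloads, and is published through the QCAMERA3_TUNING_META_DATA_BLOB vendor tag.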
7478
7479
7480 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7481 sizeof(uint32_t));
7482 data += sizeof(uint32_t);
7483
7484 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7485 sizeof(uint32_t));
7486 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7487 data += sizeof(uint32_t);
7488
7489 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7490 sizeof(uint32_t));
7491 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7492 data += sizeof(uint32_t);
7493
7494 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7495 sizeof(uint32_t));
7496 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7497 data += sizeof(uint32_t);
7498
7499 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7500 sizeof(uint32_t));
7501 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7502 data += sizeof(uint32_t);
7503
7504 metadata->tuning_params.tuning_mod3_data_size = 0;
7505 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7506 sizeof(uint32_t));
7507 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7508 data += sizeof(uint32_t);
7509
7510 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7511 TUNING_SENSOR_DATA_MAX);
7512 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7513 count);
7514 data += count;
7515
7516 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7517 TUNING_VFE_DATA_MAX);
7518 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7519 count);
7520 data += count;
7521
7522 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7523 TUNING_CPP_DATA_MAX);
7524 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7525 count);
7526 data += count;
7527
7528 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7529 TUNING_CAC_DATA_MAX);
7530 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7531 count);
7532 data += count;
7533
7534 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7535 (int32_t *)(void *)tuning_meta_data_blob,
7536 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7537 }
7538
7539 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7540 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7541 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7542 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7543 NEUTRAL_COL_POINTS);
7544 }
7545
7546 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7547 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7548 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7549 }
7550
7551 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7552 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7553 // Adjust crop region from sensor output coordinate system to active
7554 // array coordinate system.
7555 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7556 hAeRegions->rect.width, hAeRegions->rect.height);
7557
7558 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7559 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7560 REGIONS_TUPLE_COUNT);
7561 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7562 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7563 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7564 hAeRegions->rect.height);
7565 }
7566
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007567 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7568 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7569 if (NAME_NOT_FOUND != val) {
7570 uint8_t fwkAfMode = (uint8_t)val;
7571 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7572 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7573 } else {
7574 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7575 val);
7576 }
7577 }
7578
Thierry Strudel3d639192016-09-09 11:52:26 -07007579 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7580 uint8_t fwk_afState = (uint8_t) *afState;
7581 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007582 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007583 }
7584
7585 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7586 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7587 }
7588
7589 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7590 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7591 }
7592
7593 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7594 uint8_t fwk_lensState = *lensState;
7595 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7596 }
7597
Thierry Strudel3d639192016-09-09 11:52:26 -07007598
7599 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007600 uint32_t ab_mode = *hal_ab_mode;
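        // The framework only defines OFF/50HZ/60HZ/AUTO antibanding modes, so the HAL-internal
        // AUTO_50HZ/AUTO_60HZ variants are collapsed back to AUTO before the lookup.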
7601 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7602 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7603 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7604 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007605 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007606 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007607 if (NAME_NOT_FOUND != val) {
7608 uint8_t fwk_ab_mode = (uint8_t)val;
7609 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7610 }
7611 }
7612
7613 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7614 int val = lookupFwkName(SCENE_MODES_MAP,
7615 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7616 if (NAME_NOT_FOUND != val) {
7617 uint8_t fwkBestshotMode = (uint8_t)val;
7618 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7619 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7620 } else {
7621 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7622 }
7623 }
7624
7625 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7626 uint8_t fwk_mode = (uint8_t) *mode;
7627 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7628 }
7629
7630 /* Constant metadata values to be updated */
7631 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7632 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7633
7634 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7635 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7636
7637 int32_t hotPixelMap[2];
7638 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
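    // An empty hot pixel map (count 0) is published here, since per-pixel defect coordinates
    // are not reported by this HAL.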
7639
7640 // CDS
7641 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7642 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7643 }
7644
Thierry Strudel04e026f2016-10-10 11:27:36 -07007645 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7646 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007647 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
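        // mCurrFeatureState caches which features are currently active so that HDR/IR/TNR
        // transitions are logged once on toggle rather than on every frame.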
Thierry Strudel04e026f2016-10-10 11:27:36 -07007648 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7649 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7650 } else {
7651 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7652 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007653
7654 if(fwk_hdr != curr_hdr_state) {
7655 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7656 if(fwk_hdr)
7657 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7658 else
7659 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7660 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007661 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7662 }
7663
Thierry Strudel54dc9782017-02-15 12:12:10 -08007664 //binning correction
7665 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7666 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7667 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7668 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7669 }
7670
Thierry Strudel04e026f2016-10-10 11:27:36 -07007671 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007672 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007673 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7674 int8_t is_ir_on = 0;
7675
7676 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7677 if(is_ir_on != curr_ir_state) {
7678 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7679 if(is_ir_on)
7680 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7681 else
7682 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7683 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007684 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007685 }
7686
Thierry Strudel269c81a2016-10-12 12:13:59 -07007687 // AEC SPEED
7688 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7689 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7690 }
7691
7692 // AWB SPEED
7693 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7694 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7695 }
7696
Thierry Strudel3d639192016-09-09 11:52:26 -07007697 // TNR
7698 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7699 uint8_t tnr_enable = tnr->denoise_enable;
7700 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007701 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7702 int8_t is_tnr_on = 0;
7703
7704 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7705 if(is_tnr_on != curr_tnr_state) {
7706 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7707 if(is_tnr_on)
7708 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7709 else
7710 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007712
7713 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7714 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7715 }
7716
7717 // Reprocess crop data
7718 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7719 uint8_t cnt = crop_data->num_of_streams;
7720 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7721 // mm-qcamera-daemon only posts crop_data for streams
7722 // not linked to pproc, so the absence of valid crop metadata is not
7723 // necessarily an error case.
7724 LOGD("No valid crop metadata entries");
7725 } else {
7726 uint32_t reproc_stream_id;
7727 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7728 LOGD("No reprocessible stream found, ignore crop data");
7729 } else {
7730 int rc = NO_ERROR;
7731 Vector<int32_t> roi_map;
7732 int32_t *crop = new int32_t[cnt*4];
7733 if (NULL == crop) {
7734 rc = NO_MEMORY;
7735 }
7736 if (NO_ERROR == rc) {
7737 int32_t streams_found = 0;
7738 for (size_t i = 0; i < cnt; i++) {
7739 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7740 if (pprocDone) {
7741 // HAL already does internal reprocessing,
7742 // either via reprocessing before JPEG encoding,
7743 // or offline postprocessing for pproc bypass case.
7744 crop[0] = 0;
7745 crop[1] = 0;
7746 crop[2] = mInputStreamInfo.dim.width;
7747 crop[3] = mInputStreamInfo.dim.height;
7748 } else {
7749 crop[0] = crop_data->crop_info[i].crop.left;
7750 crop[1] = crop_data->crop_info[i].crop.top;
7751 crop[2] = crop_data->crop_info[i].crop.width;
7752 crop[3] = crop_data->crop_info[i].crop.height;
7753 }
7754 roi_map.add(crop_data->crop_info[i].roi_map.left);
7755 roi_map.add(crop_data->crop_info[i].roi_map.top);
7756 roi_map.add(crop_data->crop_info[i].roi_map.width);
7757 roi_map.add(crop_data->crop_info[i].roi_map.height);
7758 streams_found++;
7759 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7760 crop[0], crop[1], crop[2], crop[3]);
7761 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7762 crop_data->crop_info[i].roi_map.left,
7763 crop_data->crop_info[i].roi_map.top,
7764 crop_data->crop_info[i].roi_map.width,
7765 crop_data->crop_info[i].roi_map.height);
7766 break;
7767
7768 }
7769 }
7770 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7771 &streams_found, 1);
7772 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7773 crop, (size_t)(streams_found * 4));
7774 if (roi_map.array()) {
7775 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7776 roi_map.array(), roi_map.size());
7777 }
7778 }
7779 if (crop) {
7780 delete [] crop;
7781 }
7782 }
7783 }
7784 }
7785
7786 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7787 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7788 // so hardcode the CAC result to OFF mode.
7789 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7790 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7791 } else {
7792 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7793 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7794 *cacMode);
7795 if (NAME_NOT_FOUND != val) {
7796 uint8_t resultCacMode = (uint8_t)val;
7797                // Check whether the CAC result from the callback matches the framework-set CAC mode.
7798                // If not, report the CAC mode that came in the corresponding request.
7799 if (fwk_cacMode != resultCacMode) {
7800 resultCacMode = fwk_cacMode;
7801 }
7802                // Check if CAC is disabled by property
7803 if (m_cacModeDisabled) {
7804 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7805 }
7806
7807                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7808 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7809 } else {
7810 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7811 }
7812 }
7813 }
7814
7815 // Post blob of cam_cds_data through vendor tag.
7816 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7817 uint8_t cnt = cdsInfo->num_of_streams;
7818 cam_cds_data_t cdsDataOverride;
7819 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7820 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7821 cdsDataOverride.num_of_streams = 1;
7822 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7823 uint32_t reproc_stream_id;
7824 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7825 LOGD("No reprocessible stream found, ignore cds data");
7826 } else {
7827 for (size_t i = 0; i < cnt; i++) {
7828 if (cdsInfo->cds_info[i].stream_id ==
7829 reproc_stream_id) {
7830 cdsDataOverride.cds_info[0].cds_enable =
7831 cdsInfo->cds_info[i].cds_enable;
7832 break;
7833 }
7834 }
7835 }
7836 } else {
7837 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7838 }
7839 camMetadata.update(QCAMERA3_CDS_INFO,
7840 (uint8_t *)&cdsDataOverride,
7841 sizeof(cam_cds_data_t));
7842 }
7843
7844 // Ldaf calibration data
7845 if (!mLdafCalibExist) {
7846 IF_META_AVAILABLE(uint32_t, ldafCalib,
7847 CAM_INTF_META_LDAF_EXIF, metadata) {
7848 mLdafCalibExist = true;
7849 mLdafCalib[0] = ldafCalib[0];
7850 mLdafCalib[1] = ldafCalib[1];
7851 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7852 ldafCalib[0], ldafCalib[1]);
7853 }
7854 }
7855
7856    // EXIF debug data through vendor tag
7857 /*
7858 * Mobicat Mask can assume 3 values:
7859 * 1 refers to Mobicat data,
7860 * 2 refers to Stats Debug and Exif Debug Data
7861 * 3 refers to Mobicat and Stats Debug Data
7862 * We want to make sure that we are sending Exif debug data
7863 * only when Mobicat Mask is 2.
7864 */
7865 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7866 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7867 (uint8_t *)(void *)mExifParams.debug_params,
7868 sizeof(mm_jpeg_debug_exif_params_t));
7869 }
7870
7871    // Reprocess and DDM debug data through vendor tag
7872    cam_reprocess_info_t repro_info;
7873    memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
7874    IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7875            CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
7876        memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
7877    }
7878    IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7879            CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
7880        memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
7881    }
7882    IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7883            CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
7884        memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
7885    }
7886    IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7887            CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
7888        memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
7889    }
7890    IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7891            CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
7892        memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
7893    }
7894    IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
7895        memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
7896    }
7897    IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7898            CAM_INTF_PARM_ROTATION, metadata) {
7899        memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
7900    }
7901    IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7902 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7903 }
7904 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7905 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7906 }
7907 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7908 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
7909
7910    // INSTANT AEC MODE
7911 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7912 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7913 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7914 }
7915
7916    // AF scene change
7917 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7918 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7919 }
7920
7921    // Enable ZSL
7922 if (enableZsl != nullptr) {
7923 uint8_t value = *enableZsl ?
7924 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7925 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7926 }
7927
7928    // OIS Data
7929 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7930 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7931 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7932 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7933 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7934 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7935 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
7936 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
7937 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
7938 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
7939 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
7940 }
7941
7942    resultMetadata = camMetadata.release();
7943 return resultMetadata;
7944}
7945
7946/*===========================================================================
7947 * FUNCTION : saveExifParams
7948 *
7949 * DESCRIPTION: Cache EXIF debug parameters (AE/AWB/AF/ASD/stats) from the metadata callback in mExifParams
7950 *
7951 * PARAMETERS :
7952 * @metadata : metadata information from callback
7953 *
7954 * RETURN : none
7955 *
7956 *==========================================================================*/
7957void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7958{
7959 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7960 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7961 if (mExifParams.debug_params) {
7962 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7963 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7964 }
7965 }
7966 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7967 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7968 if (mExifParams.debug_params) {
7969 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7970 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7971 }
7972 }
7973 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7974 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7975 if (mExifParams.debug_params) {
7976 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7977 mExifParams.debug_params->af_debug_params_valid = TRUE;
7978 }
7979 }
7980 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7981 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7982 if (mExifParams.debug_params) {
7983 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7984 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7985 }
7986 }
7987 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7988 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7989 if (mExifParams.debug_params) {
7990 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7991 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7992 }
7993 }
7994 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7995 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7996 if (mExifParams.debug_params) {
7997 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7998 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7999 }
8000 }
8001 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8002 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8003 if (mExifParams.debug_params) {
8004 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8005 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8006 }
8007 }
8008 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8009 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8010 if (mExifParams.debug_params) {
8011 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8012 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8013 }
8014 }
8015}
8016
8017/*===========================================================================
8018 * FUNCTION : get3AExifParams
8019 *
8020 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams)
8021 *
8022 * PARAMETERS : none
8023 *
8024 *
8025 * RETURN : mm_jpeg_exif_params_t
8026 *
8027 *==========================================================================*/
8028mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8029{
8030 return mExifParams;
8031}
8032
8033/*===========================================================================
8034 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8035 *
8036 * DESCRIPTION: Translate urgent (partial result) metadata from the backend into framework result metadata
8037 *
8038 * PARAMETERS :
8039 * @metadata : metadata information from callback
8040 *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8041 * urgent metadata in a batch. Always true for
8042 * non-batch mode.
8043 *
8044 * RETURN : camera_metadata_t*
8045 * metadata in a format specified by fwk
8046 *==========================================================================*/
8047camera_metadata_t*
8048QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
8049        (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
8050{
8051 CameraMetadata camMetadata;
8052 camera_metadata_t *resultMetadata;
8053
8054    if (!lastUrgentMetadataInBatch) {
8055 /* In batch mode, use empty metadata if this is not the last in batch
8056 */
8057 resultMetadata = allocate_camera_metadata(0, 0);
8058 return resultMetadata;
8059 }
8060
8061 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8062 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8063 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8064 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8065 }
8066
8067 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8068 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8069 &aecTrigger->trigger, 1);
8070 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8071 &aecTrigger->trigger_id, 1);
8072 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8073 aecTrigger->trigger);
8074 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8075 aecTrigger->trigger_id);
8076 }
8077
8078 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8079 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8080 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8081 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8082 }
8083
8084    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8085 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8086 &af_trigger->trigger, 1);
8087 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8088 af_trigger->trigger);
8089 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8090 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8091 af_trigger->trigger_id);
8092 }
8093
8094    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8095 /*af regions*/
8096 int32_t afRegions[REGIONS_TUPLE_COUNT];
8097 // Adjust crop region from sensor output coordinate system to active
8098 // array coordinate system.
8099 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8100 hAfRegions->rect.width, hAfRegions->rect.height);
8101
8102 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8103 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8104 REGIONS_TUPLE_COUNT);
8105 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8106 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8107 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8108 hAfRegions->rect.height);
8109 }
8110
8111    // AF region confidence
8112 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8113 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8114 }
8115
8116    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8117 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8118 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8119 if (NAME_NOT_FOUND != val) {
8120 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8121 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8122 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8123 } else {
8124 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8125 }
8126 }
8127
8128 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8129 uint32_t aeMode = CAM_AE_MODE_MAX;
8130 int32_t flashMode = CAM_FLASH_MODE_MAX;
8131 int32_t redeye = -1;
8132 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8133 aeMode = *pAeMode;
8134 }
8135 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8136 flashMode = *pFlashMode;
8137 }
8138 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8139 redeye = *pRedeye;
8140 }
8141
8142 if (1 == redeye) {
8143 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8144 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8145 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8146 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8147 flashMode);
8148 if (NAME_NOT_FOUND != val) {
8149 fwk_aeMode = (uint8_t)val;
8150 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8151 } else {
8152 LOGE("Unsupported flash mode %d", flashMode);
8153 }
8154 } else if (aeMode == CAM_AE_MODE_ON) {
8155 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8156 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8157 } else if (aeMode == CAM_AE_MODE_OFF) {
8158 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8159 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8160    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8161 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8162 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8163    } else {
8164 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8165 "flashMode:%d, aeMode:%u!!!",
8166 redeye, flashMode, aeMode);
8167 }
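    // Summary of the AE mode deduction above (derived from this if/else chain):
    //   redeye == 1                           -> AE_MODE_ON_AUTO_FLASH_REDEYE
    //   flashMode AUTO or ON                  -> mapped through AE_FLASH_MODE_MAP
    //   aeMode CAM_AE_MODE_ON                 -> AE_MODE_ON
    //   aeMode CAM_AE_MODE_OFF                -> AE_MODE_OFF
    //   aeMode CAM_AE_MODE_ON_EXTERNAL_FLASH  -> NEXUS_EXPERIMENTAL_2016 external-flash AE mode
    // Any other combination is logged as an error and no AE mode is reported.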
8168    if (mInstantAEC) {
8169        // Increment frame index count until the bound is reached for instant AEC.
8170 mInstantAecFrameIdxCount++;
8171 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8172 CAM_INTF_META_AEC_INFO, metadata) {
8173 LOGH("ae_params->settled = %d",ae_params->settled);
8174 // If AEC settled, or if number of frames reached bound value,
8175 // should reset instant AEC.
8176 if (ae_params->settled ||
8177 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8178 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8179 mInstantAEC = false;
8180 mResetInstantAEC = true;
8181 mInstantAecFrameIdxCount = 0;
8182 }
8183 }
8184 }
8185    resultMetadata = camMetadata.release();
8186 return resultMetadata;
8187}
8188
8189/*===========================================================================
8190 * FUNCTION : dumpMetadataToFile
8191 *
8192 * DESCRIPTION: Dumps tuning metadata to file system
8193 *
8194 * PARAMETERS :
8195 * @meta : tuning metadata
8196 * @dumpFrameCount : current dump frame count
8197 *   @enabled : Enable mask
8198 *   @type / @frameNumber : dump type tag and frame number used in the output file name
8199 *==========================================================================*/
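// Layout of the binary dump written by this function (derived from the write() calls
// below): six uint32_t header fields -- tuning data version, then the sensor, VFE, CPP,
// CAC and mod3 data sizes -- followed by the sensor, VFE, CPP and CAC data blocks taken
// from meta.data at their fixed offsets. The file is named
// <QCAMERA_DUMP_FRM_LOCATION><timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin.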
8200void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8201 uint32_t &dumpFrameCount,
8202 bool enabled,
8203 const char *type,
8204 uint32_t frameNumber)
8205{
8206 //Some sanity checks
8207 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8208 LOGE("Tuning sensor data size bigger than expected %d: %d",
8209 meta.tuning_sensor_data_size,
8210 TUNING_SENSOR_DATA_MAX);
8211 return;
8212 }
8213
8214 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8215 LOGE("Tuning VFE data size bigger than expected %d: %d",
8216 meta.tuning_vfe_data_size,
8217 TUNING_VFE_DATA_MAX);
8218 return;
8219 }
8220
8221 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8222 LOGE("Tuning CPP data size bigger than expected %d: %d",
8223 meta.tuning_cpp_data_size,
8224 TUNING_CPP_DATA_MAX);
8225 return;
8226 }
8227
8228 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8229 LOGE("Tuning CAC data size bigger than expected %d: %d",
8230 meta.tuning_cac_data_size,
8231 TUNING_CAC_DATA_MAX);
8232 return;
8233 }
8234 //
8235
8236 if(enabled){
8237 char timeBuf[FILENAME_MAX];
8238 char buf[FILENAME_MAX];
8239 memset(buf, 0, sizeof(buf));
8240 memset(timeBuf, 0, sizeof(timeBuf));
8241 time_t current_time;
8242 struct tm * timeinfo;
8243 time (&current_time);
8244 timeinfo = localtime (&current_time);
8245 if (timeinfo != NULL) {
8246 strftime (timeBuf, sizeof(timeBuf),
8247 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8248 }
8249 String8 filePath(timeBuf);
8250 snprintf(buf,
8251 sizeof(buf),
8252 "%dm_%s_%d.bin",
8253 dumpFrameCount,
8254 type,
8255 frameNumber);
8256 filePath.append(buf);
8257 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8258 if (file_fd >= 0) {
8259 ssize_t written_len = 0;
8260 meta.tuning_data_version = TUNING_DATA_VERSION;
8261 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8262 written_len += write(file_fd, data, sizeof(uint32_t));
8263 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8264 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8265 written_len += write(file_fd, data, sizeof(uint32_t));
8266 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8267 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8268 written_len += write(file_fd, data, sizeof(uint32_t));
8269 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8270 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8271 written_len += write(file_fd, data, sizeof(uint32_t));
8272 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8273 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8274 written_len += write(file_fd, data, sizeof(uint32_t));
8275 meta.tuning_mod3_data_size = 0;
8276 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8277 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8278 written_len += write(file_fd, data, sizeof(uint32_t));
8279 size_t total_size = meta.tuning_sensor_data_size;
8280 data = (void *)((uint8_t *)&meta.data);
8281 written_len += write(file_fd, data, total_size);
8282 total_size = meta.tuning_vfe_data_size;
8283 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8284 written_len += write(file_fd, data, total_size);
8285 total_size = meta.tuning_cpp_data_size;
8286 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8287 written_len += write(file_fd, data, total_size);
8288 total_size = meta.tuning_cac_data_size;
8289 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8290 written_len += write(file_fd, data, total_size);
8291 close(file_fd);
8292 }else {
8293        } else {
8294 }
8295 }
8296}
8297
8298/*===========================================================================
8299 * FUNCTION : cleanAndSortStreamInfo
8300 *
8301 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8302 *              and sort them such that the raw stream is at the end of the list.
8303 *              This is a workaround for a camera daemon constraint.
8304 *
8305 * PARAMETERS : None
8306 *
8307 *==========================================================================*/
8308void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8309{
8310 List<stream_info_t *> newStreamInfo;
8311
8312 /*clean up invalid streams*/
8313 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8314 it != mStreamInfo.end();) {
8315 if(((*it)->status) == INVALID){
8316 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8317 delete channel;
8318 free(*it);
8319 it = mStreamInfo.erase(it);
8320 } else {
8321 it++;
8322 }
8323 }
8324
8325 // Move preview/video/callback/snapshot streams into newList
8326 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8327 it != mStreamInfo.end();) {
8328 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8329 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8330 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8331 newStreamInfo.push_back(*it);
8332 it = mStreamInfo.erase(it);
8333 } else
8334 it++;
8335 }
8336 // Move raw streams into newList
8337 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8338 it != mStreamInfo.end();) {
8339 newStreamInfo.push_back(*it);
8340 it = mStreamInfo.erase(it);
8341 }
8342
8343 mStreamInfo = newStreamInfo;
8344}
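// Resulting order (editorial note): the non-RAW streams keep their relative order at the
// front of mStreamInfo and any RAW_OPAQUE/RAW10/RAW16 streams are appended at the end,
// matching the camera daemon constraint mentioned above.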
8345
8346/*===========================================================================
8347 * FUNCTION : extractJpegMetadata
8348 *
8349 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8350 * JPEG metadata is cached in HAL, and return as part of capture
8351 * result when metadata is returned from camera daemon.
8352 *
8353 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8354 * @request: capture request
8355 *
8356 *==========================================================================*/
8357void QCamera3HardwareInterface::extractJpegMetadata(
8358 CameraMetadata& jpegMetadata,
8359 const camera3_capture_request_t *request)
8360{
8361 CameraMetadata frame_settings;
8362 frame_settings = request->settings;
8363
8364 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8365 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8366 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8367 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8368
8369 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8370 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8371 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8372 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8373
8374 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8375 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8376 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8377 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8378
8379 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8380 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8381 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8382 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8383
8384 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8385 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8386 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8387 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8388
8389 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8390 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8391 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8392 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8393
8394 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8395 int32_t thumbnail_size[2];
8396 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8397 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8398 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8399 int32_t orientation =
8400 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8401            if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8402                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8403 int32_t temp;
8404 temp = thumbnail_size[0];
8405 thumbnail_size[0] = thumbnail_size[1];
8406 thumbnail_size[1] = temp;
8407 }
8408 }
8409 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8410 thumbnail_size,
8411 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8412 }
8413
8414}
8415
8416/*===========================================================================
8417 * FUNCTION : convertToRegions
8418 *
8419 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8420 *
8421 * PARAMETERS :
8422 * @rect : cam_rect_t struct to convert
8423 * @region : int32_t destination array
8424 * @weight : if we are converting from cam_area_t, weight is valid
8425 * else weight = -1
8426 *
8427 *==========================================================================*/
8428void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8429 int32_t *region, int weight)
8430{
8431    region[FACE_LEFT] = rect.left;
8432 region[FACE_TOP] = rect.top;
8433 region[FACE_RIGHT] = rect.left + rect.width;
8434 region[FACE_BOTTOM] = rect.top + rect.height;
8435    if (weight > -1) {
8436        region[FACE_WEIGHT] = weight;
8437    }
8438}
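// Worked example (illustrative): rect {left=100, top=200, width=300, height=400} with
// weight=1 yields region = {100, 200, 400, 600, 1}, i.e. [left, top, right, bottom, weight].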
8439
8440/*===========================================================================
8441 * FUNCTION : convertFromRegions
8442 *
8443 * DESCRIPTION: helper method to convert a framework region tag into cam_area_t
8444 *
8445 * PARAMETERS :
8446 *   @roi            : cam_area_t destination to populate
8447 *   @frame_settings : framework capture settings containing the region tag
8448 *   @tag            : metadata tag whose data is laid out as
8449 *                     [x_min, y_min, x_max, y_max, weight]
8450 *
8451 *==========================================================================*/
8452void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8453        const CameraMetadata &frame_settings, uint32_t tag)
8454{
8455    int32_t x_min = frame_settings.find(tag).data.i32[0];
8456 int32_t y_min = frame_settings.find(tag).data.i32[1];
8457 int32_t x_max = frame_settings.find(tag).data.i32[2];
8458 int32_t y_max = frame_settings.find(tag).data.i32[3];
8459 roi.weight = frame_settings.find(tag).data.i32[4];
8460 roi.rect.left = x_min;
8461 roi.rect.top = y_min;
8462 roi.rect.width = x_max - x_min;
8463 roi.rect.height = y_max - y_min;
8464}
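// Worked example (illustrative): tag data {100, 200, 400, 600, 1} yields
// roi.rect = {left=100, top=200, width=300, height=400} with roi.weight = 1 -- the
// inverse of convertToRegions() above.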
8465
8466/*===========================================================================
8467 * FUNCTION : resetIfNeededROI
8468 *
8469 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8470 * crop region
8471 *
8472 * PARAMETERS :
8473 * @roi : cam_area_t struct to resize
8474 * @scalerCropRegion : cam_crop_region_t region to compare against
8475 *
8476 *
8477 *==========================================================================*/
8478bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8479 const cam_crop_region_t* scalerCropRegion)
8480{
8481 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8482 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8483 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8484 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8485
8486    /* According to the spec, weight = 0 indicates that the ROI must be disabled.
8487     * Without this check, the validation below (which tests whether the ROI lies
8488     * inside the scaler crop region) would fail, the ROI would not be reset, and
8489     * the algorithm would keep using a stale ROI window.
8490     */
8491 if (roi->weight == 0) {
8492 return true;
8493 }
8494
8495 if ((roi_x_max < scalerCropRegion->left) ||
8496 // right edge of roi window is left of scalar crop's left edge
8497 (roi_y_max < scalerCropRegion->top) ||
8498 // bottom edge of roi window is above scalar crop's top edge
8499 (roi->rect.left > crop_x_max) ||
8500 // left edge of roi window is beyond(right) of scalar crop's right edge
8501 (roi->rect.top > crop_y_max)){
8502            // top edge of roi window is below scaler crop's bottom edge
8503 return false;
8504 }
8505 if (roi->rect.left < scalerCropRegion->left) {
8506 roi->rect.left = scalerCropRegion->left;
8507 }
8508 if (roi->rect.top < scalerCropRegion->top) {
8509 roi->rect.top = scalerCropRegion->top;
8510 }
8511 if (roi_x_max > crop_x_max) {
8512 roi_x_max = crop_x_max;
8513 }
8514 if (roi_y_max > crop_y_max) {
8515 roi_y_max = crop_y_max;
8516 }
8517 roi->rect.width = roi_x_max - roi->rect.left;
8518 roi->rect.height = roi_y_max - roi->rect.top;
8519 return true;
8520}
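// Worked example (illustrative): roi {left=0, top=0, width=4000, height=3000} checked
// against a scaler crop of {left=500, top=375, width=3000, height=2250} is clamped in
// place to {left=500, top=375, width=3000, height=2250} and the function returns true.
// A roi with weight == 0 returns true without modification, and a roi lying entirely
// outside the crop region returns false.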
8521
8522/*===========================================================================
8523 * FUNCTION : convertLandmarks
8524 *
8525 * DESCRIPTION: helper method to extract the landmarks from face detection info
8526 *
8527 * PARAMETERS :
8528 * @landmark_data : input landmark data to be converted
8529 * @landmarks : int32_t destination array
8530 *
8531 *
8532 *==========================================================================*/
8533void QCamera3HardwareInterface::convertLandmarks(
8534 cam_face_landmarks_info_t landmark_data,
8535 int32_t *landmarks)
8536{
8537    if (landmark_data.is_left_eye_valid) {
8538 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8539 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8540 } else {
8541 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8542 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8543 }
8544
8545 if (landmark_data.is_right_eye_valid) {
8546 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8547 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8548 } else {
8549 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8550 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8551 }
8552
8553 if (landmark_data.is_mouth_valid) {
8554 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8555 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8556 } else {
8557 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8558 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8559 }
8560}
8561
8562/*===========================================================================
8563 * FUNCTION : setInvalidLandmarks
8564 *
8565 * DESCRIPTION: helper method to set invalid landmarks
8566 *
8567 * PARAMETERS :
8568 * @landmarks : int32_t destination array
8569 *
8570 *
8571 *==========================================================================*/
8572void QCamera3HardwareInterface::setInvalidLandmarks(
8573 int32_t *landmarks)
8574{
8575 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8576 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8577 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8578 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8579 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8580 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8581}
8582
8583#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8584
8585/*===========================================================================
8586 * FUNCTION : getCapabilities
8587 *
8588 * DESCRIPTION: query camera capability from back-end
8589 *
8590 * PARAMETERS :
8591 * @ops : mm-interface ops structure
8592 * @cam_handle : camera handle for which we need capability
8593 *
8594 * RETURN : ptr type of capability structure
8595 * capability for success
8596 * NULL for failure
8597 *==========================================================================*/
8598cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8599 uint32_t cam_handle)
8600{
8601 int rc = NO_ERROR;
8602 QCamera3HeapMemory *capabilityHeap = NULL;
8603 cam_capability_t *cap_ptr = NULL;
8604
8605 if (ops == NULL) {
8606 LOGE("Invalid arguments");
8607 return NULL;
8608 }
8609
8610 capabilityHeap = new QCamera3HeapMemory(1);
8611 if (capabilityHeap == NULL) {
8612 LOGE("creation of capabilityHeap failed");
8613 return NULL;
8614 }
8615
8616 /* Allocate memory for capability buffer */
8617 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8618 if(rc != OK) {
8619        LOGE("No memory for capability");
8620 goto allocate_failed;
8621 }
8622
8623 /* Map memory for capability buffer */
8624 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8625
8626 rc = ops->map_buf(cam_handle,
8627 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8628 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8629 if(rc < 0) {
8630 LOGE("failed to map capability buffer");
8631 rc = FAILED_TRANSACTION;
8632 goto map_failed;
8633 }
8634
8635 /* Query Capability */
8636 rc = ops->query_capability(cam_handle);
8637 if(rc < 0) {
8638 LOGE("failed to query capability");
8639 rc = FAILED_TRANSACTION;
8640 goto query_failed;
8641 }
8642
8643 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8644 if (cap_ptr == NULL) {
8645 LOGE("out of memory");
8646 rc = NO_MEMORY;
8647 goto query_failed;
8648 }
8649
8650 memset(cap_ptr, 0, sizeof(cam_capability_t));
8651 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8652
8653 int index;
8654 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8655 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8656 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8657 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8658 }
8659
8660query_failed:
8661 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8662map_failed:
8663 capabilityHeap->deallocate();
8664allocate_failed:
8665 delete capabilityHeap;
8666
8667 if (rc != NO_ERROR) {
8668 return NULL;
8669 } else {
8670 return cap_ptr;
8671 }
8672}
8673
8674/*===========================================================================
8675 * FUNCTION : initCapabilities
8676 *
8677 * DESCRIPTION: initialize camera capabilities in static data struct
8678 *
8679 * PARAMETERS :
8680 * @cameraId : camera Id
8681 *
8682 * RETURN : int32_t type of status
8683 * NO_ERROR -- success
8684 * none-zero failure code
8685 *==========================================================================*/
8686int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8687{
8688 int rc = 0;
8689 mm_camera_vtbl_t *cameraHandle = NULL;
8690    uint32_t handle = 0;
8691
8692 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8693 if (rc) {
8694 LOGE("camera_open failed. rc = %d", rc);
8695 goto open_failed;
8696 }
8697 if (!cameraHandle) {
8698 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8699 goto open_failed;
8700 }
8701
8702    handle = get_main_camera_handle(cameraHandle->camera_handle);
8703 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8704 if (gCamCapability[cameraId] == NULL) {
8705 rc = FAILED_TRANSACTION;
8706 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008707 }
8708
8709    gCamCapability[cameraId]->camera_index = cameraId;
8710    if (is_dual_camera_by_idx(cameraId)) {
8711 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8712 gCamCapability[cameraId]->aux_cam_cap =
8713 getCapabilities(cameraHandle->ops, handle);
8714 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8715 rc = FAILED_TRANSACTION;
8716 free(gCamCapability[cameraId]);
8717 goto failed_op;
8718 }
8719
8720 // Copy the main camera capability to main_cam_cap struct
8721 gCamCapability[cameraId]->main_cam_cap =
8722 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8723 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8724 LOGE("out of memory");
8725 rc = NO_MEMORY;
8726 goto failed_op;
8727 }
8728 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8729 sizeof(cam_capability_t));
8730    }
8731failed_op:
8732    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8733 cameraHandle = NULL;
8734open_failed:
8735 return rc;
8736}
8737
8738/*==========================================================================
8739 * FUNCTION   : get3AVersion
8740 *
8741 * DESCRIPTION: get the Q3A S/W version
8742 *
8743 * PARAMETERS :
8744 * @sw_version: Reference of Q3A structure which will hold version info upon
8745 * return
8746 *
8747 * RETURN : None
8748 *
8749 *==========================================================================*/
8750void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8751{
8752 if(gCamCapability[mCameraId])
8753 sw_version = gCamCapability[mCameraId]->q3a_version;
8754 else
8755 LOGE("Capability structure NULL!");
8756}
8757
8758
8759/*===========================================================================
8760 * FUNCTION : initParameters
8761 *
8762 * DESCRIPTION: initialize camera parameters
8763 *
8764 * PARAMETERS :
8765 *
8766 * RETURN : int32_t type of status
8767 * NO_ERROR -- success
8768 * none-zero failure code
8769 *==========================================================================*/
8770int QCamera3HardwareInterface::initParameters()
8771{
8772 int rc = 0;
8773
8774 //Allocate Set Param Buffer
8775 mParamHeap = new QCamera3HeapMemory(1);
8776 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8777 if(rc != OK) {
8778 rc = NO_MEMORY;
8779 LOGE("Failed to allocate SETPARM Heap memory");
8780 delete mParamHeap;
8781 mParamHeap = NULL;
8782 return rc;
8783 }
8784
8785 //Map memory for parameters buffer
8786 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8787 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8788 mParamHeap->getFd(0),
8789 sizeof(metadata_buffer_t),
8790 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8791 if(rc < 0) {
8792 LOGE("failed to map SETPARM buffer");
8793 rc = FAILED_TRANSACTION;
8794 mParamHeap->deallocate();
8795 delete mParamHeap;
8796 mParamHeap = NULL;
8797 return rc;
8798 }
8799
8800 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8801
8802 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8803 return rc;
8804}
8805
8806/*===========================================================================
8807 * FUNCTION : deinitParameters
8808 *
8809 * DESCRIPTION: de-initialize camera parameters
8810 *
8811 * PARAMETERS :
8812 *
8813 * RETURN : NONE
8814 *==========================================================================*/
8815void QCamera3HardwareInterface::deinitParameters()
8816{
8817 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8818 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8819
8820 mParamHeap->deallocate();
8821 delete mParamHeap;
8822 mParamHeap = NULL;
8823
8824 mParameters = NULL;
8825
8826 free(mPrevParameters);
8827 mPrevParameters = NULL;
8828}
8829
8830/*===========================================================================
8831 * FUNCTION : calcMaxJpegSize
8832 *
8833 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8834 *
8835 * PARAMETERS :
8836 *
8837 * RETURN : max_jpeg_size
8838 *==========================================================================*/
8839size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8840{
8841 size_t max_jpeg_size = 0;
8842 size_t temp_width, temp_height;
8843 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8844 MAX_SIZES_CNT);
8845 for (size_t i = 0; i < count; i++) {
8846 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8847 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8848 if (temp_width * temp_height > max_jpeg_size ) {
8849 max_jpeg_size = temp_width * temp_height;
8850 }
8851 }
8852 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8853 return max_jpeg_size;
8854}
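// Sizing note (illustrative, assuming a 4000x3000 maximum picture size): the worst-case
// JPEG buffer is 1.5 bytes per pixel, i.e. 4000 * 3000 * 3 / 2 = 18,000,000 bytes, plus
// sizeof(camera3_jpeg_blob_t) for the trailing camera3 JPEG blob header.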
8855
8856/*===========================================================================
8857 * FUNCTION : getMaxRawSize
8858 *
8859 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8860 *
8861 * PARAMETERS :
8862 *
8863 * RETURN : Largest supported Raw Dimension
8864 *==========================================================================*/
8865cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8866{
8867 int max_width = 0;
8868 cam_dimension_t maxRawSize;
8869
8870 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8871 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8872 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8873 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8874 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8875 }
8876 }
8877 return maxRawSize;
8878}
8879
8880
8881/*===========================================================================
8882 * FUNCTION : calcMaxJpegDim
8883 *
8884 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8885 *
8886 * PARAMETERS :
8887 *
8888 * RETURN : max_jpeg_dim
8889 *==========================================================================*/
8890cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8891{
8892 cam_dimension_t max_jpeg_dim;
8893 cam_dimension_t curr_jpeg_dim;
8894 max_jpeg_dim.width = 0;
8895 max_jpeg_dim.height = 0;
8896 curr_jpeg_dim.width = 0;
8897 curr_jpeg_dim.height = 0;
8898 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8899 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8900 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8901 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8902 max_jpeg_dim.width * max_jpeg_dim.height ) {
8903 max_jpeg_dim.width = curr_jpeg_dim.width;
8904 max_jpeg_dim.height = curr_jpeg_dim.height;
8905 }
8906 }
8907 return max_jpeg_dim;
8908}
8909
8910/*===========================================================================
8911 * FUNCTION : addStreamConfig
8912 *
8913 * DESCRIPTION: adds the stream configuration to the array
8914 *
8915 * PARAMETERS :
8916 * @available_stream_configs : pointer to stream configuration array
8917 * @scalar_format : scalar format
8918 * @dim : configuration dimension
8919 * @config_type : input or output configuration type
8920 *
8921 * RETURN : NONE
8922 *==========================================================================*/
8923void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8924 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8925{
8926 available_stream_configs.add(scalar_format);
8927 available_stream_configs.add(dim.width);
8928 available_stream_configs.add(dim.height);
8929 available_stream_configs.add(config_type);
8930}
8931
8932/*===========================================================================
8933 * FUNCTION   : supportBurstCapture
8934 *
8935 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8936 *
8937 * PARAMETERS :
8938 * @cameraId : camera Id
8939 *
8940 * RETURN : true if camera supports BURST_CAPTURE
8941 * false otherwise
8942 *==========================================================================*/
8943bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8944{
8945 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8946 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8947 const int32_t highResWidth = 3264;
8948 const int32_t highResHeight = 2448;
8949
8950 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8951 // Maximum resolution images cannot be captured at >= 10fps
8952 // -> not supporting BURST_CAPTURE
8953 return false;
8954 }
8955
8956 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8957 // Maximum resolution images can be captured at >= 20fps
8958 // --> supporting BURST_CAPTURE
8959 return true;
8960 }
8961
8962 // Find the smallest highRes resolution, or largest resolution if there is none
8963 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8964 MAX_SIZES_CNT);
8965 size_t highRes = 0;
8966 while ((highRes + 1 < totalCnt) &&
8967 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8968 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8969 highResWidth * highResHeight)) {
8970 highRes++;
8971 }
8972 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8973 return true;
8974 } else {
8975 return false;
8976 }
8977}
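// Decision sketch (derived from the bounds above): if the max-resolution minimum frame
// duration exceeds 100 ms (slower than 10 fps), BURST_CAPTURE is not advertised; if it is
// at most 50 ms (20 fps or faster), it is advertised; otherwise it is advertised only if
// the smallest size still >= 3264x2448 (or the largest size, when none qualifies) can be
// captured at 20 fps or faster.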
8978
8979/*===========================================================================
8980 * FUNCTION   : getPDStatIndex
8981 *
8982 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8983 *
8984 * PARAMETERS :
8985 * @caps : camera capabilities
8986 *
8987 * RETURN : int32_t type
8988 * non-negative - on success
8989 * -1 - on failure
8990 *==========================================================================*/
8991int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8992 if (nullptr == caps) {
8993 return -1;
8994 }
8995
8996 uint32_t metaRawCount = caps->meta_raw_channel_count;
8997 int32_t ret = -1;
8998 for (size_t i = 0; i < metaRawCount; i++) {
8999 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9000 ret = i;
9001 break;
9002 }
9003 }
9004
9005 return ret;
9006}
9007
9008/*===========================================================================
9009 * FUNCTION   : initStaticMetadata
9010 *
9011 * DESCRIPTION: initialize the static metadata
9012 *
9013 * PARAMETERS :
9014 * @cameraId : camera Id
9015 *
9016 * RETURN : int32_t type of status
9017 * 0 -- success
9018 * non-zero failure code
9019 *==========================================================================*/
9020int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9021{
9022 int rc = 0;
9023 CameraMetadata staticInfo;
9024 size_t count = 0;
9025 bool limitedDevice = false;
9026 char prop[PROPERTY_VALUE_MAX];
9027 bool supportBurst = false;
9028
9029 supportBurst = supportBurstCapture(cameraId);
9030
9031 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9032     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9033     * advertised as a limited device */
9034 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9035 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9036 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9037 !supportBurst;
9038
9039 uint8_t supportedHwLvl = limitedDevice ?
9040 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9041#ifndef USE_HAL_3_3
9042            // LEVEL_3 - This device will support level 3.
9043            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9044#else
9045            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9046#endif
9047
9048 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9049 &supportedHwLvl, 1);
9050
9051 bool facingBack = false;
9052 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9053 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9054 facingBack = true;
9055 }
9056 /*HAL 3 only*/
9057 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9058 &gCamCapability[cameraId]->min_focus_distance, 1);
9059
9060 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9061 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9062
9063 /*should be using focal lengths but sensor doesn't provide that info now*/
9064 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9065 &gCamCapability[cameraId]->focal_length,
9066 1);
9067
9068 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9069 gCamCapability[cameraId]->apertures,
9070 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9071
9072 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9073 gCamCapability[cameraId]->filter_densities,
9074 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9075
9076
9077    uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9078 size_t mode_count =
9079 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9080 for (size_t i = 0; i < mode_count; i++) {
9081 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9082 }
9083    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9084            available_opt_stab_modes, mode_count);
9085
9086 int32_t lens_shading_map_size[] = {
9087 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9088 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9089 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9090 lens_shading_map_size,
9091 sizeof(lens_shading_map_size)/sizeof(int32_t));
9092
9093 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9094 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9095
9096 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9097 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9098
9099 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9100 &gCamCapability[cameraId]->max_frame_duration, 1);
9101
9102 camera_metadata_rational baseGainFactor = {
9103 gCamCapability[cameraId]->base_gain_factor.numerator,
9104 gCamCapability[cameraId]->base_gain_factor.denominator};
9105 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9106 &baseGainFactor, 1);
9107
9108 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9109 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9110
9111 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9112 gCamCapability[cameraId]->pixel_array_size.height};
9113 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9114 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9115
9116 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9117 gCamCapability[cameraId]->active_array_size.top,
9118 gCamCapability[cameraId]->active_array_size.width,
9119 gCamCapability[cameraId]->active_array_size.height};
9120 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9121 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9122
9123 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9124 &gCamCapability[cameraId]->white_level, 1);
9125
9126    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9127    adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9128            gCamCapability[cameraId]->color_arrangement);
9129    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9130            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9131
9132#ifndef USE_HAL_3_3
9133 bool hasBlackRegions = false;
9134 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9135 LOGW("black_region_count: %d is bounded to %d",
9136 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9137 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9138 }
9139 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9140 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9141 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9142 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9143 }
9144 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9145 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9146 hasBlackRegions = true;
9147 }
9148#endif
9149    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9150 &gCamCapability[cameraId]->flash_charge_duration, 1);
9151
9152 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9153 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9154
9155    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9156 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9157 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9158    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9159 &timestampSource, 1);
9160
9161    //update histogram vendor data
9162    staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9163            &gCamCapability[cameraId]->histogram_size, 1);
9164
9165    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9166            &gCamCapability[cameraId]->max_histogram_count, 1);
9167
9168    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9169 //so that app can request fewer number of bins than the maximum supported.
9170 std::vector<int32_t> histBins;
9171 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9172 histBins.push_back(maxHistBins);
9173 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9174 (maxHistBins & 0x1) == 0) {
9175 histBins.push_back(maxHistBins >> 1);
9176 maxHistBins >>= 1;
9177 }
9178 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9179 histBins.data(), histBins.size());
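// Worked example (hypothetical values, not read from the capability struct): with
// max_histogram_count = 256 and MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the loop above
// advertises {256, 128, 64, 32}; halving stops once the next value would fall below
// the minimum or the current value becomes odd.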
9180
Thierry Strudel3d639192016-09-09 11:52:26 -07009181 int32_t sharpness_map_size[] = {
9182 gCamCapability[cameraId]->sharpness_map_size.width,
9183 gCamCapability[cameraId]->sharpness_map_size.height};
9184
9185 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9186 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9187
9188 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9189 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9190
Emilian Peev0f3c3162017-03-15 12:57:46 +00009191 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9192 if (0 <= indexPD) {
9193 // Advertise PD stats data as part of the Depth capabilities
9194 int32_t depthWidth =
9195 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9196 int32_t depthHeight =
9197 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9198 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9199 assert(0 < depthSamplesCount);
9200 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9201 &depthSamplesCount, 1);
9202
9203 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9204 depthHeight,
9205 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9206 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9207 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9208 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9209 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9210
9211 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9212 depthHeight, 33333333,
9213 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9214 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9215 depthMinDuration,
9216 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9217
9218 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9219 depthHeight, 0,
9220 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9221 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9222 depthStallDuration,
9223 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9224
9225 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9226 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9227 }
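// Illustrative arithmetic (hypothetical PD dimensions, not taken from raw_meta_dim):
// a 64x60 PD stats buffer gives depthSamplesCount = (64 * 60 * 2) / 16 = 480, so the
// BLOB depth configuration above would be advertised as 480x1 next to the 64x60
// RAW16 entry, both with a 33333333 ns (~30 fps) minimum frame duration and no stall.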
9228
Thierry Strudel3d639192016-09-09 11:52:26 -07009229 int32_t scalar_formats[] = {
9230 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9231 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9232 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9233 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9234 HAL_PIXEL_FORMAT_RAW10,
9235 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009236 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9237 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9238 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009239
9240 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9241 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9242 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9243 count, MAX_SIZES_CNT, available_processed_sizes);
9244 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9245 available_processed_sizes, count * 2);
9246
9247 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9248 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9249 makeTable(gCamCapability[cameraId]->raw_dim,
9250 count, MAX_SIZES_CNT, available_raw_sizes);
9251 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9252 available_raw_sizes, count * 2);
9253
9254 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9255 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9256 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9257 count, MAX_SIZES_CNT, available_fps_ranges);
9258 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9259 available_fps_ranges, count * 2);
9260
9261 camera_metadata_rational exposureCompensationStep = {
9262 gCamCapability[cameraId]->exp_compensation_step.numerator,
9263 gCamCapability[cameraId]->exp_compensation_step.denominator};
9264 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9265 &exposureCompensationStep, 1);
9266
9267 Vector<uint8_t> availableVstabModes;
9268 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9269 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009270 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009271 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009272 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009273 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009274 count = IS_TYPE_MAX;
9275 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9276 for (size_t i = 0; i < count; i++) {
9277 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9278 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9279 eisSupported = true;
9280 break;
9281 }
9282 }
9283 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009284 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9285 }
9286 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9287 availableVstabModes.array(), availableVstabModes.size());
9288
9289 /*HAL 1 and HAL 3 common*/
9290 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9291 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9292 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009293 // Cap the max zoom to the max preferred value
9294 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009295 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9296 &maxZoom, 1);
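// Worked example with a hypothetical zoom table {100, 200, 400, 795}: maxZoomStep is
// the last entry (795), and maxZoomStep / minZoomStep is an integer division, so it
// truncates to 7 before being capped by MIN() against MAX_PREFERRED_ZOOM_RATIO.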
9297
9298 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9299 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9300
9301 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9302 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9303 max3aRegions[2] = 0; /* AF not supported */
9304 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9305 max3aRegions, 3);
9306
9307 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9308 memset(prop, 0, sizeof(prop));
9309 property_get("persist.camera.facedetect", prop, "1");
9310 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9311 LOGD("Support face detection mode: %d",
9312 supportedFaceDetectMode);
9313
9314 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009315 /* supported mode should be OFF if the max number of faces is 0 */
9316 if (maxFaces <= 0) {
9317 supportedFaceDetectMode = 0;
9318 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009319 Vector<uint8_t> availableFaceDetectModes;
9320 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9321 if (supportedFaceDetectMode == 1) {
9322 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9323 } else if (supportedFaceDetectMode == 2) {
9324 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9325 } else if (supportedFaceDetectMode == 3) {
9326 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9327 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9328 } else {
9329 maxFaces = 0;
9330 }
9331 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9332 availableFaceDetectModes.array(),
9333 availableFaceDetectModes.size());
9334 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9335 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009336 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9337 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9338 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009339
9340 int32_t exposureCompensationRange[] = {
9341 gCamCapability[cameraId]->exposure_compensation_min,
9342 gCamCapability[cameraId]->exposure_compensation_max};
9343 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9344 exposureCompensationRange,
9345 sizeof(exposureCompensationRange)/sizeof(int32_t));
9346
9347 uint8_t lensFacing = (facingBack) ?
9348 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9349 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9350
9351 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9352 available_thumbnail_sizes,
9353 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9354
9355 /* all sizes will be combined into this tag */
9356 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9357 /*android.scaler.availableStreamConfigurations*/
9358 Vector<int32_t> available_stream_configs;
9359 cam_dimension_t active_array_dim;
9360 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9361 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009362
9363 /* Advertise the list of supported input dimensions based on the property below.
9364 By default all sizes up to 5MP will be advertised.
9365 Note that the setprop resolution format should be WxH,
9366 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9367 To list all supported sizes, set the property to "0x0" */
9368 cam_dimension_t minInputSize = {2592,1944}; //5MP
9369 memset(prop, 0, sizeof(prop));
9370 property_get("persist.camera.input.minsize", prop, "2592x1944");
9371 if (strlen(prop) > 0) {
9372 char *saveptr = NULL;
9373 char *token = strtok_r(prop, "x", &saveptr);
9374 if (token != NULL) {
9375 minInputSize.width = atoi(token);
9376 }
9377 token = strtok_r(NULL, "x", &saveptr);
9378 if (token != NULL) {
9379 minInputSize.height = atoi(token);
9380 }
9381 }
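// Example: "adb shell setprop persist.camera.input.minsize 1280x720" parses to
// minInputSize = {1280, 720}. A value of "0x0" parses to {0, 0}, so the width/height
// comparison in the loop below passes for every picture size and all of them are
// advertised as input configurations.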
9382
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 /* Add input/output stream configurations for each scalar formats*/
9384 for (size_t j = 0; j < scalar_formats_count; j++) {
9385 switch (scalar_formats[j]) {
9386 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9387 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9388 case HAL_PIXEL_FORMAT_RAW10:
9389 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9390 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9391 addStreamConfig(available_stream_configs, scalar_formats[j],
9392 gCamCapability[cameraId]->raw_dim[i],
9393 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9394 }
9395 break;
9396 case HAL_PIXEL_FORMAT_BLOB:
9397 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9398 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9399 addStreamConfig(available_stream_configs, scalar_formats[j],
9400 gCamCapability[cameraId]->picture_sizes_tbl[i],
9401 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9402 }
9403 break;
9404 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9405 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9406 default:
9407 cam_dimension_t largest_picture_size;
9408 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9409 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9410 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9411 addStreamConfig(available_stream_configs, scalar_formats[j],
9412 gCamCapability[cameraId]->picture_sizes_tbl[i],
9413 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009414 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009415 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9416 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009417 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9418 >= minInputSize.width) || (gCamCapability[cameraId]->
9419 picture_sizes_tbl[i].height >= minInputSize.height)) {
9420 addStreamConfig(available_stream_configs, scalar_formats[j],
9421 gCamCapability[cameraId]->picture_sizes_tbl[i],
9422 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9423 }
9424 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009425 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009426
Thierry Strudel3d639192016-09-09 11:52:26 -07009427 break;
9428 }
9429 }
9430
9431 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9432 available_stream_configs.array(), available_stream_configs.size());
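// Each addStreamConfig() call above is assumed to append one
// (format, width, height, direction) 4-tuple, which is the layout this tag expects.
// E.g. a hypothetical 4032x3024 JPEG size would land in the vector as
// {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}.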
9433 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9434 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9435
9436 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9437 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9438
9439 /* android.scaler.availableMinFrameDurations */
9440 Vector<int64_t> available_min_durations;
9441 for (size_t j = 0; j < scalar_formats_count; j++) {
9442 switch (scalar_formats[j]) {
9443 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9444 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9445 case HAL_PIXEL_FORMAT_RAW10:
9446 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9447 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9448 available_min_durations.add(scalar_formats[j]);
9449 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9450 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9451 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9452 }
9453 break;
9454 default:
9455 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9456 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9457 available_min_durations.add(scalar_formats[j]);
9458 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9459 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9460 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9461 }
9462 break;
9463 }
9464 }
9465 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9466 available_min_durations.array(), available_min_durations.size());
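// The min-duration table mirrors the stream configurations: repeated
// (format, width, height, min_frame_duration_ns) 4-tuples; a duration of
// 33333333 ns corresponds to roughly 30 fps.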
9467
9468 Vector<int32_t> available_hfr_configs;
9469 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9470 int32_t fps = 0;
9471 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9472 case CAM_HFR_MODE_60FPS:
9473 fps = 60;
9474 break;
9475 case CAM_HFR_MODE_90FPS:
9476 fps = 90;
9477 break;
9478 case CAM_HFR_MODE_120FPS:
9479 fps = 120;
9480 break;
9481 case CAM_HFR_MODE_150FPS:
9482 fps = 150;
9483 break;
9484 case CAM_HFR_MODE_180FPS:
9485 fps = 180;
9486 break;
9487 case CAM_HFR_MODE_210FPS:
9488 fps = 210;
9489 break;
9490 case CAM_HFR_MODE_240FPS:
9491 fps = 240;
9492 break;
9493 case CAM_HFR_MODE_480FPS:
9494 fps = 480;
9495 break;
9496 case CAM_HFR_MODE_OFF:
9497 case CAM_HFR_MODE_MAX:
9498 default:
9499 break;
9500 }
9501
9502 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9503 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9504 /* For each HFR frame rate, need to advertise one variable fps range
9505 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9506 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9507 * set by the app. When video recording is started, [120, 120] is
9508 * set. This way sensor configuration does not change when recording
9509 * is started */
9510
9511 /* (width, height, fps_min, fps_max, batch_size_max) */
9512 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9513 j < MAX_SIZES_CNT; j++) {
9514 available_hfr_configs.add(
9515 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9516 available_hfr_configs.add(
9517 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9518 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9519 available_hfr_configs.add(fps);
9520 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9521
9522 /* (width, height, fps_min, fps_max, batch_size_max) */
9523 available_hfr_configs.add(
9524 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9525 available_hfr_configs.add(
9526 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9527 available_hfr_configs.add(fps);
9528 available_hfr_configs.add(fps);
9529 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9530 }
9531 }
9532 }
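// Example, assuming PREVIEW_FPS_FOR_HFR is 30: a hypothetical 1920x1080 entry at
// 120 fps contributes {1920, 1080, 30, 120, 4} followed by {1920, 1080, 120, 120, 4},
// the trailing 4 being the batch size (120 / 30).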
9533 //Advertise HFR capability only if the property is set
9534 memset(prop, 0, sizeof(prop));
9535 property_get("persist.camera.hal3hfr.enable", prop, "1");
9536 uint8_t hfrEnable = (uint8_t)atoi(prop);
9537
9538 if(hfrEnable && available_hfr_configs.array()) {
9539 staticInfo.update(
9540 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9541 available_hfr_configs.array(), available_hfr_configs.size());
9542 }
9543
9544 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9545 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9546 &max_jpeg_size, 1);
9547
9548 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9549 size_t size = 0;
9550 count = CAM_EFFECT_MODE_MAX;
9551 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9552 for (size_t i = 0; i < count; i++) {
9553 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9554 gCamCapability[cameraId]->supported_effects[i]);
9555 if (NAME_NOT_FOUND != val) {
9556 avail_effects[size] = (uint8_t)val;
9557 size++;
9558 }
9559 }
9560 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9561 avail_effects,
9562 size);
9563
9564 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9565 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9566 size_t supported_scene_modes_cnt = 0;
9567 count = CAM_SCENE_MODE_MAX;
9568 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9569 for (size_t i = 0; i < count; i++) {
9570 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9571 CAM_SCENE_MODE_OFF) {
9572 int val = lookupFwkName(SCENE_MODES_MAP,
9573 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9574 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009575
Thierry Strudel3d639192016-09-09 11:52:26 -07009576 if (NAME_NOT_FOUND != val) {
9577 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9578 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9579 supported_scene_modes_cnt++;
9580 }
9581 }
9582 }
9583 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9584 avail_scene_modes,
9585 supported_scene_modes_cnt);
9586
9587 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9588 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9589 supported_scene_modes_cnt,
9590 CAM_SCENE_MODE_MAX,
9591 scene_mode_overrides,
9592 supported_indexes,
9593 cameraId);
9594
9595 if (supported_scene_modes_cnt == 0) {
9596 supported_scene_modes_cnt = 1;
9597 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9598 }
9599
9600 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9601 scene_mode_overrides, supported_scene_modes_cnt * 3);
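// ANDROID_CONTROL_SCENE_MODE_OVERRIDES carries three override values per advertised
// scene mode (AE mode, AWB mode, AF mode), hence the supported_scene_modes_cnt * 3 count.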
9602
9603 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9604 ANDROID_CONTROL_MODE_AUTO,
9605 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9606 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9607 available_control_modes,
9608 3);
9609
9610 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9611 size = 0;
9612 count = CAM_ANTIBANDING_MODE_MAX;
9613 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9614 for (size_t i = 0; i < count; i++) {
9615 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9616 gCamCapability[cameraId]->supported_antibandings[i]);
9617 if (NAME_NOT_FOUND != val) {
9618 avail_antibanding_modes[size] = (uint8_t)val;
9619 size++;
9620 }
9621
9622 }
9623 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9624 avail_antibanding_modes,
9625 size);
9626
9627 uint8_t avail_abberation_modes[] = {
9628 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9629 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9630 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9631 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9632 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9633 if (0 == count) {
9634 // If no aberration correction modes are available for a device, advertise only the OFF mode
9635 size = 1;
9636 } else {
9637 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9638 // So, advertise all 3 modes if at least one mode is supported, as per the
9639 // Android M requirement
9640 size = 3;
9641 }
9642 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9643 avail_abberation_modes,
9644 size);
9645
9646 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9647 size = 0;
9648 count = CAM_FOCUS_MODE_MAX;
9649 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9650 for (size_t i = 0; i < count; i++) {
9651 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9652 gCamCapability[cameraId]->supported_focus_modes[i]);
9653 if (NAME_NOT_FOUND != val) {
9654 avail_af_modes[size] = (uint8_t)val;
9655 size++;
9656 }
9657 }
9658 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9659 avail_af_modes,
9660 size);
9661
9662 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9663 size = 0;
9664 count = CAM_WB_MODE_MAX;
9665 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9666 for (size_t i = 0; i < count; i++) {
9667 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9668 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9669 gCamCapability[cameraId]->supported_white_balances[i]);
9670 if (NAME_NOT_FOUND != val) {
9671 avail_awb_modes[size] = (uint8_t)val;
9672 size++;
9673 }
9674 }
9675 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9676 avail_awb_modes,
9677 size);
9678
9679 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9680 count = CAM_FLASH_FIRING_LEVEL_MAX;
9681 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9682 count);
9683 for (size_t i = 0; i < count; i++) {
9684 available_flash_levels[i] =
9685 gCamCapability[cameraId]->supported_firing_levels[i];
9686 }
9687 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9688 available_flash_levels, count);
9689
9690 uint8_t flashAvailable;
9691 if (gCamCapability[cameraId]->flash_available)
9692 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9693 else
9694 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9695 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9696 &flashAvailable, 1);
9697
9698 Vector<uint8_t> avail_ae_modes;
9699 count = CAM_AE_MODE_MAX;
9700 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9701 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009702 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9703 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9704 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9705 }
9706 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009707 }
9708 if (flashAvailable) {
9709 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9710 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9711 }
9712 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9713 avail_ae_modes.array(),
9714 avail_ae_modes.size());
9715
9716 int32_t sensitivity_range[2];
9717 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9718 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9719 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9720 sensitivity_range,
9721 sizeof(sensitivity_range) / sizeof(int32_t));
9722
9723 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9724 &gCamCapability[cameraId]->max_analog_sensitivity,
9725 1);
9726
9727 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9728 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9729 &sensor_orientation,
9730 1);
9731
9732 int32_t max_output_streams[] = {
9733 MAX_STALLING_STREAMS,
9734 MAX_PROCESSED_STREAMS,
9735 MAX_RAW_STREAMS};
9736 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9737 max_output_streams,
9738 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9739
9740 uint8_t avail_leds = 0;
9741 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9742 &avail_leds, 0);
9743
9744 uint8_t focus_dist_calibrated;
9745 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9746 gCamCapability[cameraId]->focus_dist_calibrated);
9747 if (NAME_NOT_FOUND != val) {
9748 focus_dist_calibrated = (uint8_t)val;
9749 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9750 &focus_dist_calibrated, 1);
9751 }
9752
9753 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9754 size = 0;
9755 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9756 MAX_TEST_PATTERN_CNT);
9757 for (size_t i = 0; i < count; i++) {
9758 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9759 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9760 if (NAME_NOT_FOUND != testpatternMode) {
9761 avail_testpattern_modes[size] = testpatternMode;
9762 size++;
9763 }
9764 }
9765 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9766 avail_testpattern_modes,
9767 size);
9768
9769 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9770 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9771 &max_pipeline_depth,
9772 1);
9773
9774 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9775 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9776 &partial_result_count,
9777 1);
9778
9779 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9780 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9781
9782 Vector<uint8_t> available_capabilities;
9783 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9784 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9785 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9786 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9787 if (supportBurst) {
9788 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9789 }
9790 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9791 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9792 if (hfrEnable && available_hfr_configs.array()) {
9793 available_capabilities.add(
9794 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9795 }
9796
9797 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9798 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9799 }
9800 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9801 available_capabilities.array(),
9802 available_capabilities.size());
9803
9804 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9805 //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9806 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9807 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9808
9809 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9810 &aeLockAvailable, 1);
9811
9812 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9813 //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9814 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9815 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9816
9817 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9818 &awbLockAvailable, 1);
9819
9820 int32_t max_input_streams = 1;
9821 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9822 &max_input_streams,
9823 1);
9824
9825 /* Format of the map is: input format, num_output_formats, outputFormat1, ..., outputFormatN */
9826 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9827 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9828 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9829 HAL_PIXEL_FORMAT_YCbCr_420_888};
9830 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9831 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
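// Decoded, io_format_map reads: IMPLEMENTATION_DEFINED input reprocesses into 2 output
// formats (BLOB, YCbCr_420_888), and YCbCr_420_888 input likewise into 2 output formats
// (BLOB, YCbCr_420_888).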
9832
9833 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9834 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9835 &max_latency,
9836 1);
9837
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009838#ifndef USE_HAL_3_3
9839 int32_t isp_sensitivity_range[2];
9840 isp_sensitivity_range[0] =
9841 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9842 isp_sensitivity_range[1] =
9843 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9844 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9845 isp_sensitivity_range,
9846 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9847#endif
9848
Thierry Strudel3d639192016-09-09 11:52:26 -07009849 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9850 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9851 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9852 available_hot_pixel_modes,
9853 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9854
9855 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9856 ANDROID_SHADING_MODE_FAST,
9857 ANDROID_SHADING_MODE_HIGH_QUALITY};
9858 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9859 available_shading_modes,
9860 3);
9861
9862 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9863 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9864 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9865 available_lens_shading_map_modes,
9866 2);
9867
9868 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9869 ANDROID_EDGE_MODE_FAST,
9870 ANDROID_EDGE_MODE_HIGH_QUALITY,
9871 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9872 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9873 available_edge_modes,
9874 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9875
9876 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9877 ANDROID_NOISE_REDUCTION_MODE_FAST,
9878 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9879 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9880 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9881 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9882 available_noise_red_modes,
9883 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9884
9885 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9886 ANDROID_TONEMAP_MODE_FAST,
9887 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9888 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9889 available_tonemap_modes,
9890 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9891
9892 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9893 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9894 available_hot_pixel_map_modes,
9895 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9896
9897 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9898 gCamCapability[cameraId]->reference_illuminant1);
9899 if (NAME_NOT_FOUND != val) {
9900 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9901 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9902 }
9903
9904 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9905 gCamCapability[cameraId]->reference_illuminant2);
9906 if (NAME_NOT_FOUND != val) {
9907 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9908 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9909 }
9910
9911 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9912 (void *)gCamCapability[cameraId]->forward_matrix1,
9913 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9914
9915 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9916 (void *)gCamCapability[cameraId]->forward_matrix2,
9917 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9918
9919 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9920 (void *)gCamCapability[cameraId]->color_transform1,
9921 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9922
9923 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9924 (void *)gCamCapability[cameraId]->color_transform2,
9925 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9926
9927 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9928 (void *)gCamCapability[cameraId]->calibration_transform1,
9929 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9930
9931 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9932 (void *)gCamCapability[cameraId]->calibration_transform2,
9933 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9934
9935 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9936 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9937 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9938 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9939 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9940 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9941 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9942 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9943 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9944 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9945 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9946 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9947 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9948 ANDROID_JPEG_GPS_COORDINATES,
9949 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9950 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9951 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9952 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9953 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9954 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9955 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9956 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9957 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9958 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009959#ifndef USE_HAL_3_3
9960 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9961#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009962 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009963 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009964 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9965 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009966 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009967 /* DevCamDebug metadata request_keys_basic */
9968 DEVCAMDEBUG_META_ENABLE,
9969 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009970 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009971 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009972 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009973 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009974 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009975
9976 size_t request_keys_cnt =
9977 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9978 Vector<int32_t> available_request_keys;
9979 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9980 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9981 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9982 }
9983
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009984 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00009985 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009986 }
9987
Thierry Strudel3d639192016-09-09 11:52:26 -07009988 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9989 available_request_keys.array(), available_request_keys.size());
9990
9991 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9992 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9993 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9994 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9995 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9996 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9997 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9998 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9999 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10000 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10001 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10002 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10003 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10004 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10005 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10006 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10007 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010008 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010009 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10010 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10011 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010012 ANDROID_STATISTICS_FACE_SCORES,
10013#ifndef USE_HAL_3_3
10014 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10015#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010016 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010017 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010018 // DevCamDebug metadata result_keys_basic
10019 DEVCAMDEBUG_META_ENABLE,
10020 // DevCamDebug metadata result_keys AF
10021 DEVCAMDEBUG_AF_LENS_POSITION,
10022 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10023 DEVCAMDEBUG_AF_TOF_DISTANCE,
10024 DEVCAMDEBUG_AF_LUMA,
10025 DEVCAMDEBUG_AF_HAF_STATE,
10026 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10027 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10028 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10029 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10030 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10031 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10032 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10033 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10034 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10035 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10036 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10037 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10038 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10039 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10040 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10041 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10042 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10043 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10044 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10045 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10046 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10047 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10048 // DevCamDebug metadata result_keys AEC
10049 DEVCAMDEBUG_AEC_TARGET_LUMA,
10050 DEVCAMDEBUG_AEC_COMP_LUMA,
10051 DEVCAMDEBUG_AEC_AVG_LUMA,
10052 DEVCAMDEBUG_AEC_CUR_LUMA,
10053 DEVCAMDEBUG_AEC_LINECOUNT,
10054 DEVCAMDEBUG_AEC_REAL_GAIN,
10055 DEVCAMDEBUG_AEC_EXP_INDEX,
10056 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010057 // DevCamDebug metadata result_keys zzHDR
10058 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10059 DEVCAMDEBUG_AEC_L_LINECOUNT,
10060 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10061 DEVCAMDEBUG_AEC_S_LINECOUNT,
10062 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10063 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10064 // DevCamDebug metadata result_keys ADRC
10065 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10066 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10067 DEVCAMDEBUG_AEC_GTM_RATIO,
10068 DEVCAMDEBUG_AEC_LTM_RATIO,
10069 DEVCAMDEBUG_AEC_LA_RATIO,
10070 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010071 // DevCamDebug metadata result_keys AWB
10072 DEVCAMDEBUG_AWB_R_GAIN,
10073 DEVCAMDEBUG_AWB_G_GAIN,
10074 DEVCAMDEBUG_AWB_B_GAIN,
10075 DEVCAMDEBUG_AWB_CCT,
10076 DEVCAMDEBUG_AWB_DECISION,
10077 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010078 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10079 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10080 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010081 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010082 };
10083
Thierry Strudel3d639192016-09-09 11:52:26 -070010084 size_t result_keys_cnt =
10085 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10086
10087 Vector<int32_t> available_result_keys;
10088 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10089 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10090 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10091 }
10092 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10093 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10094 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10095 }
10096 if (supportedFaceDetectMode == 1) {
10097 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10098 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10099 } else if ((supportedFaceDetectMode == 2) ||
10100 (supportedFaceDetectMode == 3)) {
10101 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10102 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10103 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010104#ifndef USE_HAL_3_3
10105 if (hasBlackRegions) {
10106 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10107 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10108 }
10109#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010110
10111 if (gExposeEnableZslKey) {
10112 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10113 }
10114
Thierry Strudel3d639192016-09-09 11:52:26 -070010115 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10116 available_result_keys.array(), available_result_keys.size());
10117
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010118 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010119 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10120 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10121 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10122 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10123 ANDROID_SCALER_CROPPING_TYPE,
10124 ANDROID_SYNC_MAX_LATENCY,
10125 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10126 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10127 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10128 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10129 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10130 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10131 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10132 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10133 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10134 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10135 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10136 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10137 ANDROID_LENS_FACING,
10138 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10139 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10140 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10141 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10142 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10143 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10144 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10145 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10146 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10147 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10148 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10149 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10150 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10151 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10152 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10153 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10154 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10155 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10156 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10157 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010158 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010159 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10160 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10161 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10162 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10163 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10164 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10165 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10166 ANDROID_CONTROL_AVAILABLE_MODES,
10167 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10168 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10169 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10170 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010171 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10172#ifndef USE_HAL_3_3
10173 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10174 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10175#endif
10176 };
10177
10178 Vector<int32_t> available_characteristics_keys;
10179 available_characteristics_keys.appendArray(characteristics_keys_basic,
10180 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10181#ifndef USE_HAL_3_3
10182 if (hasBlackRegions) {
10183 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10184 }
10185#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010186
10187 if (0 <= indexPD) {
10188 int32_t depthKeys[] = {
10189 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10190 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10191 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10192 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10193 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10194 };
10195 available_characteristics_keys.appendArray(depthKeys,
10196 sizeof(depthKeys) / sizeof(depthKeys[0]));
10197 }
10198
Thierry Strudel3d639192016-09-09 11:52:26 -070010199 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010200 available_characteristics_keys.array(),
10201 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010202
10203 /* Available stall durations depend on the HW + SW and will differ across devices */
10204 /* have to add RAW after implementation */
10205 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10206 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10207
10208 Vector<int64_t> available_stall_durations;
10209 for (uint32_t j = 0; j < stall_formats_count; j++) {
10210 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10211 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10212 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10213 available_stall_durations.add(stall_formats[j]);
10214 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10215 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10216 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10217 }
10218 } else {
10219 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10220 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10221 available_stall_durations.add(stall_formats[j]);
10222 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10223 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10224 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10225 }
10226 }
10227 }
10228 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10229 available_stall_durations.array(),
10230 available_stall_durations.size());
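// Stall entries are (format, width, height, stall_duration_ns) 4-tuples; e.g. a
// hypothetical 4032x3024 JPEG with a 300 ms stall would appear as
// {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 300000000}.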
10231
10232 //QCAMERA3_OPAQUE_RAW
10233 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10234 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10235 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10236 case LEGACY_RAW:
10237 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10238 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10239 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10240 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10241 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10242 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10243 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10244 break;
10245 case MIPI_RAW:
10246 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10247 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10248 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10249 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10250 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10251 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10252 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10253 break;
10254 default:
10255 LOGE("unknown opaque_raw_format %d",
10256 gCamCapability[cameraId]->opaque_raw_fmt);
10257 break;
10258 }
10259 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
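// Example: a sensor whose white_level equals MAX_VALUE_10BIT and whose opaque_raw_fmt
// is MIPI_RAW resolves to fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG and advertises
// QCAMERA3_OPAQUE_RAW_FORMAT_MIPI.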
10260
10261 Vector<int32_t> strides;
10262 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10263 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10264 cam_stream_buf_plane_info_t buf_planes;
10265 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10266 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10267 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10268 &gCamCapability[cameraId]->padding_info, &buf_planes);
10269 strides.add(buf_planes.plane_info.mp[0].stride);
10270 }
10271 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10272 strides.size());
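// QCAMERA3_OPAQUE_RAW_STRIDES is laid out as repeated (width, height, stride) triples,
// one per supported raw dimension, with the stride taken from plane 0 of the layout
// computed by mm_stream_calc_offset_raw().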
10273
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010274 //TBD: remove the following line once backend advertises zzHDR in feature mask
10275 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010276 //Video HDR default
10277 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10278 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010279 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010280 int32_t vhdr_mode[] = {
10281 QCAMERA3_VIDEO_HDR_MODE_OFF,
10282 QCAMERA3_VIDEO_HDR_MODE_ON};
10283
10284 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10285 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10286 vhdr_mode, vhdr_mode_count);
10287 }
10288
Thierry Strudel3d639192016-09-09 11:52:26 -070010289 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10290 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10291 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10292
10293 uint8_t isMonoOnly =
10294 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10295 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10296 &isMonoOnly, 1);
10297
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010298#ifndef USE_HAL_3_3
10299 Vector<int32_t> opaque_size;
10300 for (size_t j = 0; j < scalar_formats_count; j++) {
10301 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10302 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10303 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10304 cam_stream_buf_plane_info_t buf_planes;
10305
10306 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10307 &gCamCapability[cameraId]->padding_info, &buf_planes);
10308
10309 if (rc == 0) {
10310 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10311 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10312 opaque_size.add(buf_planes.plane_info.frame_len);
10313 } else {
10314 LOGE("raw frame calculation failed!");
10315 }
10316 }
10317 }
10318 }
10319
10320 if ((opaque_size.size() > 0) &&
10321 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10322 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10323 else
10324 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10325#endif
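// ANDROID_SENSOR_OPAQUE_RAW_SIZE is published as (width, height, frame_length_bytes)
// triples; the modulo check against PER_CONFIGURATION_SIZE_3 (assumed to be 3) only
// guards that whole triples are emitted.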
10326
Thierry Strudel04e026f2016-10-10 11:27:36 -070010327 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10328 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10329 size = 0;
10330 count = CAM_IR_MODE_MAX;
10331 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10332 for (size_t i = 0; i < count; i++) {
10333 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10334 gCamCapability[cameraId]->supported_ir_modes[i]);
10335 if (NAME_NOT_FOUND != val) {
10336 avail_ir_modes[size] = (int32_t)val;
10337 size++;
10338 }
10339 }
10340 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10341 avail_ir_modes, size);
10342 }
10343
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010344 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10345 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10346 size = 0;
10347 count = CAM_AEC_CONVERGENCE_MAX;
10348 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10349 for (size_t i = 0; i < count; i++) {
10350 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10351 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10352 if (NAME_NOT_FOUND != val) {
10353 available_instant_aec_modes[size] = (int32_t)val;
10354 size++;
10355 }
10356 }
10357 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10358 available_instant_aec_modes, size);
10359 }
10360
Thierry Strudel54dc9782017-02-15 12:12:10 -080010361 int32_t sharpness_range[] = {
10362 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10363 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10364 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10365
10366 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10367 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10368 size = 0;
10369 count = CAM_BINNING_CORRECTION_MODE_MAX;
10370 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10371 for (size_t i = 0; i < count; i++) {
10372 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10373 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10374 gCamCapability[cameraId]->supported_binning_modes[i]);
10375 if (NAME_NOT_FOUND != val) {
10376 avail_binning_modes[size] = (int32_t)val;
10377 size++;
10378 }
10379 }
10380 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10381 avail_binning_modes, size);
10382 }
10383
10384 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10385 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10386 size = 0;
10387 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10388 for (size_t i = 0; i < count; i++) {
10389 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10390 gCamCapability[cameraId]->supported_aec_modes[i]);
10391 if (NAME_NOT_FOUND != val)
10392 available_aec_modes[size++] = val;
10393 }
10394 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10395 available_aec_modes, size);
10396 }
10397
10398 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10399 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10400 size = 0;
10401 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10402 for (size_t i = 0; i < count; i++) {
10403 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10404 gCamCapability[cameraId]->supported_iso_modes[i]);
10405 if (NAME_NOT_FOUND != val)
10406 available_iso_modes[size++] = val;
10407 }
10408 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10409 available_iso_modes, size);
10410 }
10411
10412 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010413 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010414 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10415 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10416 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10417
10418 int32_t available_saturation_range[4];
10419 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10420 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10421 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10422 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10423 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10424 available_saturation_range, 4);
10425
10426 uint8_t is_hdr_values[2];
10427 is_hdr_values[0] = 0;
10428 is_hdr_values[1] = 1;
10429 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10430 is_hdr_values, 2);
10431
10432 float is_hdr_confidence_range[2];
10433 is_hdr_confidence_range[0] = 0.0;
10434 is_hdr_confidence_range[1] = 1.0;
10435 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10436 is_hdr_confidence_range, 2);
10437
Emilian Peev0a972ef2017-03-16 10:25:53 +000010438 size_t eepromLength = strnlen(
10439 reinterpret_cast<const char *>(
10440 gCamCapability[cameraId]->eeprom_version_info),
10441 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10442 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010443 char easelInfo[] = ",E:N";
10444 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10445 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10446 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010447 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10448 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010449 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010450 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10451 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10452 }
10453
Thierry Strudel3d639192016-09-09 11:52:26 -070010454 gStaticMetadata[cameraId] = staticInfo.release();
10455 return rc;
10456}
10457
10458/*===========================================================================
10459 * FUNCTION : makeTable
10460 *
10461 * DESCRIPTION: make a table of sizes
10462 *
10463 * PARAMETERS :
10464 *
10465 *
10466 *==========================================================================*/
10467void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10468 size_t max_size, int32_t *sizeTable)
10469{
10470 size_t j = 0;
10471 if (size > max_size) {
10472 size = max_size;
10473 }
10474 for (size_t i = 0; i < size; i++) {
10475 sizeTable[j] = dimTable[i].width;
10476 sizeTable[j+1] = dimTable[i].height;
10477 j+=2;
10478 }
10479}
10480
10481/*===========================================================================
10482 * FUNCTION : makeFPSTable
10483 *
10484 * DESCRIPTION: make a table of fps ranges
10485 *
10486 * PARAMETERS :
10487 *
10488 *==========================================================================*/
10489void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10490 size_t max_size, int32_t *fpsRangesTable)
10491{
10492 size_t j = 0;
10493 if (size > max_size) {
10494 size = max_size;
10495 }
10496 for (size_t i = 0; i < size; i++) {
10497 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10498 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10499 j+=2;
10500 }
10501}
10502
10503/*===========================================================================
10504 * FUNCTION : makeOverridesList
10505 *
10506 * DESCRIPTION: make a list of scene mode overrides
10507 *
10508 * PARAMETERS :
10509 *
10510 *
10511 *==========================================================================*/
10512void QCamera3HardwareInterface::makeOverridesList(
10513 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10514 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10515{
10516 /* The daemon will give a list of overrides for all scene modes.
10517 However, we should send the framework only the overrides for the
10518 scene modes that it supports. */
10519 size_t j = 0;
10520 if (size > max_size) {
10521 size = max_size;
10522 }
10523 size_t focus_count = CAM_FOCUS_MODE_MAX;
10524 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10525 focus_count);
10526 for (size_t i = 0; i < size; i++) {
10527 bool supt = false;
10528 size_t index = supported_indexes[i];
10529 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10530 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10531 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10532 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10533 overridesTable[index].awb_mode);
10534 if (NAME_NOT_FOUND != val) {
10535 overridesList[j+1] = (uint8_t)val;
10536 }
10537 uint8_t focus_override = overridesTable[index].af_mode;
10538 for (size_t k = 0; k < focus_count; k++) {
10539 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10540 supt = true;
10541 break;
10542 }
10543 }
10544 if (supt) {
10545 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10546 focus_override);
10547 if (NAME_NOT_FOUND != val) {
10548 overridesList[j+2] = (uint8_t)val;
10549 }
10550 } else {
10551 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10552 }
10553 j+=3;
10554 }
10555}
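// Illustrative note (added for clarity, not generated code): the overrides
// list built above is a flat array of 3-byte tuples, one per scene mode
// reported to the framework, laid out as (aeMode, awbMode, afMode); an
// unsupported focus override falls back to ANDROID_CONTROL_AF_MODE_OFF.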
10556
10557/*===========================================================================
10558 * FUNCTION : filterJpegSizes
10559 *
10560 * DESCRIPTION: Returns the supported JPEG sizes, keeping only processed sizes
10561 * that are no smaller than the active array divided by the downscale factor
10562 *
10563 * PARAMETERS :
10564 *
10565 * RETURN : length of jpegSizes array
10566 *==========================================================================*/
10567
10568size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10569 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10570 uint8_t downscale_factor)
10571{
10572 if (0 == downscale_factor) {
10573 downscale_factor = 1;
10574 }
10575
10576 int32_t min_width = active_array_size.width / downscale_factor;
10577 int32_t min_height = active_array_size.height / downscale_factor;
10578 size_t jpegSizesCnt = 0;
10579 if (processedSizesCnt > maxCount) {
10580 processedSizesCnt = maxCount;
10581 }
10582 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10583 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10584 jpegSizes[jpegSizesCnt] = processedSizes[i];
10585 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10586 jpegSizesCnt += 2;
10587 }
10588 }
10589 return jpegSizesCnt;
10590}
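// Worked example (hypothetical numbers, added for clarity): with a 4000x3000
// active array and downscale_factor 4, min_width/min_height become 1000x750,
// so only processed sizes of at least 1000x750 are copied into jpegSizes.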
10591
10592/*===========================================================================
10593 * FUNCTION : computeNoiseModelEntryS
10594 *
10595 * DESCRIPTION: function to map a given sensitivity to the S noise
10596 * model parameters in the DNG noise model.
10597 *
10598 * PARAMETERS : sens : the sensor sensitivity
10599 *
10600 * RETURN : S (sensor amplification) noise
10601 *
10602 *==========================================================================*/
10603double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10604 double s = gCamCapability[mCameraId]->gradient_S * sens +
10605 gCamCapability[mCameraId]->offset_S;
10606 return ((s < 0.0) ? 0.0 : s);
10607}
10608
10609/*===========================================================================
10610 * FUNCTION : computeNoiseModelEntryO
10611 *
10612 * DESCRIPTION: function to map a given sensitivity to the O noise
10613 * model parameters in the DNG noise model.
10614 *
10615 * PARAMETERS : sens : the sensor sensitivity
10616 *
10617 * RETURN : O (sensor readout) noise
10618 *
10619 *==========================================================================*/
10620double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10621 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10622 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10623 1.0 : (1.0 * sens / max_analog_sens);
10624 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10625 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10626 return ((o < 0.0) ? 0.0 : o);
10627}
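// Illustrative sketch (kept out of the build): how the S and O entries
// computed above are typically combined. These entries typically feed the
// per-CFA-channel noise profile (ANDROID_SENSOR_NOISE_PROFILE), which models
// noise as sigma(x) = sqrt(S * x + O) for a normalized sample value x. The
// calibration coefficients below are hypothetical placeholders standing in
// for gradient_S/offset_S/gradient_O/offset_O and max_analog_sensitivity;
// they are not values from any real sensor.
#if 0
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

static double exampleNoiseEntryS(int32_t sens) {
    const double kGradientS = 3.0e-7, kOffsetS = 0.0;       // hypothetical
    double s = kGradientS * sens + kOffsetS;
    return (s < 0.0) ? 0.0 : s;
}

static double exampleNoiseEntryO(int32_t sens) {
    const double kGradientO = 4.0e-12, kOffsetO = 1.0e-9;   // hypothetical
    const int32_t kMaxAnalogSens = 800;                     // hypothetical
    double digitalGain = std::max(1.0, 1.0 * sens / kMaxAnalogSens);
    double o = kGradientO * sens * sens + kOffsetO * digitalGain * digitalGain;
    return (o < 0.0) ? 0.0 : o;
}

int main() {
    // Expected noise (in normalized units) of a mid-gray pixel at ISO 400.
    double x = 0.5;
    double sigma = std::sqrt(exampleNoiseEntryS(400) * x + exampleNoiseEntryO(400));
    printf("sigma = %e\n", sigma);
    return 0;
}
#endif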
10628
10629/*===========================================================================
10630 * FUNCTION : getSensorSensitivity
10631 *
10632 * DESCRIPTION: convert iso_mode to an integer value
10633 *
10634 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10635 *
10636 * RETURN : sensitivity supported by sensor
10637 *
10638 *==========================================================================*/
10639int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10640{
10641 int32_t sensitivity;
10642
10643 switch (iso_mode) {
10644 case CAM_ISO_MODE_100:
10645 sensitivity = 100;
10646 break;
10647 case CAM_ISO_MODE_200:
10648 sensitivity = 200;
10649 break;
10650 case CAM_ISO_MODE_400:
10651 sensitivity = 400;
10652 break;
10653 case CAM_ISO_MODE_800:
10654 sensitivity = 800;
10655 break;
10656 case CAM_ISO_MODE_1600:
10657 sensitivity = 1600;
10658 break;
10659 default:
10660 sensitivity = -1;
10661 break;
10662 }
10663 return sensitivity;
10664}
10665
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010666int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010667 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010668 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10669 // to connect to Easel.
10670 bool doNotpowerOnEasel =
10671 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10672
10673 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010674 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10675 return OK;
10676 }
10677
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010678 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010679 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010680 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010681 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010682 return res;
10683 }
10684
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010685 EaselManagerClientOpened = true;
10686
10687 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010688 if (res != OK) {
10689 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10690 }
10691
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010692 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010693 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010694
10695 // Expose enableZsl key only when HDR+ mode is enabled.
10696 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010697 }
10698
10699 return OK;
10700}
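// Usage note (illustrative, added for clarity): the behavior above is gated by
// the system properties read in this function and can be toggled from adb,
// e.g.:
//   adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel off even if present
//   adb shell setprop persist.camera.hdrplus.enable 1      # full HDR+ instead of bypass-only
//   adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling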
10701
Thierry Strudel3d639192016-09-09 11:52:26 -070010702/*===========================================================================
10703 * FUNCTION : getCamInfo
10704 *
10705 * DESCRIPTION: query camera capabilities
10706 *
10707 * PARAMETERS :
10708 * @cameraId : camera Id
10709 * @info : camera info struct to be filled in with camera capabilities
10710 *
10711 * RETURN : int type of status
10712 * NO_ERROR -- success
10713 * non-zero failure code
10714 *==========================================================================*/
10715int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10716 struct camera_info *info)
10717{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010718 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010719 int rc = 0;
10720
10721 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010722
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010723 {
10724 Mutex::Autolock l(gHdrPlusClientLock);
10725 rc = initHdrPlusClientLocked();
10726 if (rc != OK) {
10727 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10728 pthread_mutex_unlock(&gCamLock);
10729 return rc;
10730 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010731 }
10732
Thierry Strudel3d639192016-09-09 11:52:26 -070010733 if (NULL == gCamCapability[cameraId]) {
10734 rc = initCapabilities(cameraId);
10735 if (rc < 0) {
10736 pthread_mutex_unlock(&gCamLock);
10737 return rc;
10738 }
10739 }
10740
10741 if (NULL == gStaticMetadata[cameraId]) {
10742 rc = initStaticMetadata(cameraId);
10743 if (rc < 0) {
10744 pthread_mutex_unlock(&gCamLock);
10745 return rc;
10746 }
10747 }
10748
10749 switch(gCamCapability[cameraId]->position) {
10750 case CAM_POSITION_BACK:
10751 case CAM_POSITION_BACK_AUX:
10752 info->facing = CAMERA_FACING_BACK;
10753 break;
10754
10755 case CAM_POSITION_FRONT:
10756 case CAM_POSITION_FRONT_AUX:
10757 info->facing = CAMERA_FACING_FRONT;
10758 break;
10759
10760 default:
10761 LOGE("Unknown position type %d for camera id:%d",
10762 gCamCapability[cameraId]->position, cameraId);
10763 rc = -1;
10764 break;
10765 }
10766
10767
10768 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010769#ifndef USE_HAL_3_3
10770 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10771#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010772 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010773#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010774 info->static_camera_characteristics = gStaticMetadata[cameraId];
10775
10776 //For now assume both cameras can operate independently.
10777 info->conflicting_devices = NULL;
10778 info->conflicting_devices_length = 0;
10779
10780 //resource cost is 100 * MIN(1.0, m/M),
10781 //where m is throughput requirement with maximum stream configuration
10782 //and M is CPP maximum throughput.
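//Worked example (hypothetical numbers): for a 4000x3000 active array with
//max_fps = 30, m = MAX_PROCESSED_STREAMS(3) * 4000 * 3000 * 30 = 1.08e9
//pixels/s; with M (max_pixel_bandwidth) = 1.2e9 pixels/s, the ratio is 0.9
//and resource_cost = 100 * MIN(1.0, 0.9) = 90.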
10783 float max_fps = 0.0;
10784 for (uint32_t i = 0;
10785 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10786 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10787 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10788 }
10789 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10790 gCamCapability[cameraId]->active_array_size.width *
10791 gCamCapability[cameraId]->active_array_size.height * max_fps /
10792 gCamCapability[cameraId]->max_pixel_bandwidth;
10793 info->resource_cost = 100 * MIN(1.0, ratio);
10794 LOGI("camera %d resource cost is %d", cameraId,
10795 info->resource_cost);
10796
10797 pthread_mutex_unlock(&gCamLock);
10798 return rc;
10799}
10800
10801/*===========================================================================
10802 * FUNCTION : translateCapabilityToMetadata
10803 *
10804 * DESCRIPTION: translate the capability into camera_metadata_t
10805 *
10806 * PARAMETERS : type of the request
10807 *
10808 *
10809 * RETURN : success: camera_metadata_t*
10810 * failure: NULL
10811 *
10812 *==========================================================================*/
10813camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10814{
10815 if (mDefaultMetadata[type] != NULL) {
10816 return mDefaultMetadata[type];
10817 }
10818 //first time we are handling this request
10819 //fill up the metadata structure using the wrapper class
10820 CameraMetadata settings;
10821 //translate from cam_capability_t to camera_metadata_tag_t
10822 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10823 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10824 int32_t defaultRequestID = 0;
10825 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10826
10827 /* OIS disable */
10828 char ois_prop[PROPERTY_VALUE_MAX];
10829 memset(ois_prop, 0, sizeof(ois_prop));
10830 property_get("persist.camera.ois.disable", ois_prop, "0");
10831 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10832
10833 /* Force video to use OIS */
10834 char videoOisProp[PROPERTY_VALUE_MAX];
10835 memset(videoOisProp, 0, sizeof(videoOisProp));
10836 property_get("persist.camera.ois.video", videoOisProp, "1");
10837 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010838
10839 // Hybrid AE enable/disable
10840 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10841 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10842 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10843 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10844
Thierry Strudel3d639192016-09-09 11:52:26 -070010845 uint8_t controlIntent = 0;
10846 uint8_t focusMode;
10847 uint8_t vsMode;
10848 uint8_t optStabMode;
10849 uint8_t cacMode;
10850 uint8_t edge_mode;
10851 uint8_t noise_red_mode;
10852 uint8_t tonemap_mode;
10853 bool highQualityModeEntryAvailable = FALSE;
10854 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010855 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010856 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10857 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010858 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010859 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010860 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010861
Thierry Strudel3d639192016-09-09 11:52:26 -070010862 switch (type) {
10863 case CAMERA3_TEMPLATE_PREVIEW:
10864 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10865 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10866 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10867 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10868 edge_mode = ANDROID_EDGE_MODE_FAST;
10869 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10870 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10871 break;
10872 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10873 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10874 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10875 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10876 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10877 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10878 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10879 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10880 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10881 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10882 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10883 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10884 highQualityModeEntryAvailable = TRUE;
10885 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10886 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10887 fastModeEntryAvailable = TRUE;
10888 }
10889 }
10890 if (highQualityModeEntryAvailable) {
10891 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10892 } else if (fastModeEntryAvailable) {
10893 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10894 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010895 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10896 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10897 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010898 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010899 break;
10900 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10901 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10902 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10903 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010904 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10905 edge_mode = ANDROID_EDGE_MODE_FAST;
10906 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10907 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10908 if (forceVideoOis)
10909 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10910 break;
10911 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10912 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10913 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10914 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010915 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10916 edge_mode = ANDROID_EDGE_MODE_FAST;
10917 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10918 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10919 if (forceVideoOis)
10920 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10921 break;
10922 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10923 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10924 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10925 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10926 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10927 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10928 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10929 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10930 break;
10931 case CAMERA3_TEMPLATE_MANUAL:
10932 edge_mode = ANDROID_EDGE_MODE_FAST;
10933 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10934 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10935 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10936 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10937 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10938 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10939 break;
10940 default:
10941 edge_mode = ANDROID_EDGE_MODE_FAST;
10942 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10943 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10944 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10945 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10946 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10947 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10948 break;
10949 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010950 // Set CAC to OFF if the underlying device doesn't support it
10951 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10952 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10953 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010954 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10955 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10956 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10957 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10958 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10959 }
10960 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010961 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010962 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010963
10964 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10965 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10966 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10967 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10968 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10969 || ois_disable)
10970 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10971 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010972 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010973
10974 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10975 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10976
10977 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10978 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10979
10980 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10981 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10982
10983 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10984 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10985
10986 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10987 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10988
10989 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10990 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10991
10992 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10993 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10994
10995 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10996 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10997
10998 /*flash*/
10999 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11000 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11001
11002 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11003 settings.update(ANDROID_FLASH_FIRING_POWER,
11004 &flashFiringLevel, 1);
11005
11006 /* lens */
11007 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11008 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11009
11010 if (gCamCapability[mCameraId]->filter_densities_count) {
11011 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11012 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11013 gCamCapability[mCameraId]->filter_densities_count);
11014 }
11015
11016 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11017 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11018
Thierry Strudel3d639192016-09-09 11:52:26 -070011019 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11020 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11021
11022 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11023 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11024
11025 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11026 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11027
11028 /* face detection (default to OFF) */
11029 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11030 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11031
Thierry Strudel54dc9782017-02-15 12:12:10 -080011032 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11033 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011034
11035 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11036 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11037
11038 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11039 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11040
Thierry Strudel3d639192016-09-09 11:52:26 -070011041
11042 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11043 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11044
11045 /* Exposure time (defaults to the minimum exposure time) */
11046 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11047 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11048
11049 /* frame duration */
11050 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11051 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11052
11053 /* sensitivity */
11054 static const int32_t default_sensitivity = 100;
11055 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011056#ifndef USE_HAL_3_3
11057 static const int32_t default_isp_sensitivity =
11058 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11059 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11060#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011061
11062 /*edge mode*/
11063 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11064
11065 /*noise reduction mode*/
11066 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11067
11068 /*color correction mode*/
11069 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11070 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11071
11072 /*tonemap mode*/
11073 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11074
11075 int32_t scaler_crop_region[4];
11076 scaler_crop_region[0] = 0;
11077 scaler_crop_region[1] = 0;
11078 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11079 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11080 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11081
11082 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11083 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11084
11085 /*focus distance*/
11086 float focus_distance = 0.0;
11087 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11088
11089 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011090 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011091 float max_range = 0.0;
11092 float max_fixed_fps = 0.0;
11093 int32_t fps_range[2] = {0, 0};
11094 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11095 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011096 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11097 TEMPLATE_MAX_PREVIEW_FPS) {
11098 continue;
11099 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011100 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11101 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11102 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11103 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11104 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11105 if (range > max_range) {
11106 fps_range[0] =
11107 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11108 fps_range[1] =
11109 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11110 max_range = range;
11111 }
11112 } else {
11113 if (range < 0.01 && max_fixed_fps <
11114 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11115 fps_range[0] =
11116 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11117 fps_range[1] =
11118 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11119 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11120 }
11121 }
11122 }
11123 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11124
11125 /*precapture trigger*/
11126 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11127 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11128
11129 /*af trigger*/
11130 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11131 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11132
11133 /* ae & af regions */
11134 int32_t active_region[] = {
11135 gCamCapability[mCameraId]->active_array_size.left,
11136 gCamCapability[mCameraId]->active_array_size.top,
11137 gCamCapability[mCameraId]->active_array_size.left +
11138 gCamCapability[mCameraId]->active_array_size.width,
11139 gCamCapability[mCameraId]->active_array_size.top +
11140 gCamCapability[mCameraId]->active_array_size.height,
11141 0};
11142 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11143 sizeof(active_region) / sizeof(active_region[0]));
11144 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11145 sizeof(active_region) / sizeof(active_region[0]));
11146
11147 /* black level lock */
11148 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11149 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11150
Thierry Strudel3d639192016-09-09 11:52:26 -070011151 //special defaults for manual template
11152 if (type == CAMERA3_TEMPLATE_MANUAL) {
11153 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11154 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11155
11156 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11157 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11158
11159 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11160 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11161
11162 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11163 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11164
11165 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11166 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11167
11168 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11169 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11170 }
11171
11172
11173 /* TNR
11174 * We'll use this location to determine for which templates TNR will be enabled.
11175 * TNR is enabled if either the preview or the video stream requires it.
11176 * This is not to be confused with linking on a per-stream basis; that decision
11177 * is still made per session and will be handled as part of stream configuration.
11178 */
11179 uint8_t tnr_enable = 0;
11180
11181 if (m_bTnrPreview || m_bTnrVideo) {
11182
11183 switch (type) {
11184 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11185 tnr_enable = 1;
11186 break;
11187
11188 default:
11189 tnr_enable = 0;
11190 break;
11191 }
11192
11193 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11194 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11195 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11196
11197 LOGD("TNR:%d with process plate %d for template:%d",
11198 tnr_enable, tnr_process_type, type);
11199 }
11200
11201 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011202 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011203 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11204
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011205 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011206 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11207
Shuzhen Wang920ea402017-05-03 08:49:39 -070011208 uint8_t related_camera_id = mCameraId;
11209 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011210
11211 /* CDS default */
11212 char prop[PROPERTY_VALUE_MAX];
11213 memset(prop, 0, sizeof(prop));
11214 property_get("persist.camera.CDS", prop, "Auto");
11215 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11216 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11217 if (CAM_CDS_MODE_MAX == cds_mode) {
11218 cds_mode = CAM_CDS_MODE_AUTO;
11219 }
11220
11221 /* Disabling CDS in templates which have TNR enabled */
11222 if (tnr_enable)
11223 cds_mode = CAM_CDS_MODE_OFF;
11224
11225 int32_t mode = cds_mode;
11226 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011227
Thierry Strudel269c81a2016-10-12 12:13:59 -070011228 /* Manual Convergence AEC Speed is disabled by default*/
11229 float default_aec_speed = 0;
11230 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11231
11232 /* Manual Convergence AWB Speed is disabled by default*/
11233 float default_awb_speed = 0;
11234 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11235
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011236 // Set instant AEC to normal convergence by default
11237 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11238 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11239
Shuzhen Wang19463d72016-03-08 11:09:52 -080011240 /* hybrid ae */
11241 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11242
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011243 if (gExposeEnableZslKey) {
11244 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11245 }
11246
Thierry Strudel3d639192016-09-09 11:52:26 -070011247 mDefaultMetadata[type] = settings.release();
11248
11249 return mDefaultMetadata[type];
11250}
11251
11252/*===========================================================================
11253 * FUNCTION : setFrameParameters
11254 *
11255 * DESCRIPTION: set parameters per frame as requested in the metadata from
11256 * framework
11257 *
11258 * PARAMETERS :
11259 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011260 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011261 * @blob_request: Whether this request is a blob request or not
11262 *
11263 * RETURN : success: NO_ERROR
11264 * failure:
11265 *==========================================================================*/
11266int QCamera3HardwareInterface::setFrameParameters(
11267 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011268 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011269 int blob_request,
11270 uint32_t snapshotStreamId)
11271{
11272 /*translate from camera_metadata_t type to parm_type_t*/
11273 int rc = 0;
11274 int32_t hal_version = CAM_HAL_V3;
11275
11276 clear_metadata_buffer(mParameters);
11277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11278 LOGE("Failed to set hal version in the parameters");
11279 return BAD_VALUE;
11280 }
11281
11282 /*we need to update the frame number in the parameters*/
11283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11284 request->frame_number)) {
11285 LOGE("Failed to set the frame number in the parameters");
11286 return BAD_VALUE;
11287 }
11288
11289 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011291 LOGE("Failed to set stream id in the parameters");
11292 return BAD_VALUE;
11293 }
11294
11295 if (mUpdateDebugLevel) {
11296 uint32_t dummyDebugLevel = 0;
11297 /* The value of dummyDebugLevel is irrelevant. On
11298 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11300 dummyDebugLevel)) {
11301 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11302 return BAD_VALUE;
11303 }
11304 mUpdateDebugLevel = false;
11305 }
11306
11307 if(request->settings != NULL){
11308 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11309 if (blob_request)
11310 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11311 }
11312
11313 return rc;
11314}
11315
11316/*===========================================================================
11317 * FUNCTION : setReprocParameters
11318 *
11319 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11320 * return it.
11321 *
11322 * PARAMETERS :
11323 * @request : request that needs to be serviced
11324 *
11325 * RETURN : success: NO_ERROR
11326 * failure:
11327 *==========================================================================*/
11328int32_t QCamera3HardwareInterface::setReprocParameters(
11329 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11330 uint32_t snapshotStreamId)
11331{
11332 /*translate from camera_metadata_t type to parm_type_t*/
11333 int rc = 0;
11334
11335 if (NULL == request->settings){
11336 LOGE("Reprocess settings cannot be NULL");
11337 return BAD_VALUE;
11338 }
11339
11340 if (NULL == reprocParam) {
11341 LOGE("Invalid reprocessing metadata buffer");
11342 return BAD_VALUE;
11343 }
11344 clear_metadata_buffer(reprocParam);
11345
11346 /*we need to update the frame number in the parameters*/
11347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11348 request->frame_number)) {
11349 LOGE("Failed to set the frame number in the parameters");
11350 return BAD_VALUE;
11351 }
11352
11353 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11354 if (rc < 0) {
11355 LOGE("Failed to translate reproc request");
11356 return rc;
11357 }
11358
11359 CameraMetadata frame_settings;
11360 frame_settings = request->settings;
11361 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11362 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11363 int32_t *crop_count =
11364 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11365 int32_t *crop_data =
11366 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11367 int32_t *roi_map =
11368 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11369 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11370 cam_crop_data_t crop_meta;
11371 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11372 crop_meta.num_of_streams = 1;
11373 crop_meta.crop_info[0].crop.left = crop_data[0];
11374 crop_meta.crop_info[0].crop.top = crop_data[1];
11375 crop_meta.crop_info[0].crop.width = crop_data[2];
11376 crop_meta.crop_info[0].crop.height = crop_data[3];
11377
11378 crop_meta.crop_info[0].roi_map.left =
11379 roi_map[0];
11380 crop_meta.crop_info[0].roi_map.top =
11381 roi_map[1];
11382 crop_meta.crop_info[0].roi_map.width =
11383 roi_map[2];
11384 crop_meta.crop_info[0].roi_map.height =
11385 roi_map[3];
11386
11387 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11388 rc = BAD_VALUE;
11389 }
11390 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11391 request->input_buffer->stream,
11392 crop_meta.crop_info[0].crop.left,
11393 crop_meta.crop_info[0].crop.top,
11394 crop_meta.crop_info[0].crop.width,
11395 crop_meta.crop_info[0].crop.height);
11396 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11397 request->input_buffer->stream,
11398 crop_meta.crop_info[0].roi_map.left,
11399 crop_meta.crop_info[0].roi_map.top,
11400 crop_meta.crop_info[0].roi_map.width,
11401 crop_meta.crop_info[0].roi_map.height);
11402 } else {
11403 LOGE("Invalid reprocess crop count %d!", *crop_count);
11404 }
11405 } else {
11406 LOGE("No crop data from matching output stream");
11407 }
11408
11409 /* These settings are not needed for regular requests so handle them specially for
11410 reprocess requests; information needed for EXIF tags */
11411 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11412 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11413 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11414 if (NAME_NOT_FOUND != val) {
11415 uint32_t flashMode = (uint32_t)val;
11416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11417 rc = BAD_VALUE;
11418 }
11419 } else {
11420 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11421 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11422 }
11423 } else {
11424 LOGH("No flash mode in reprocess settings");
11425 }
11426
11427 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11428 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11430 rc = BAD_VALUE;
11431 }
11432 } else {
11433 LOGH("No flash state in reprocess settings");
11434 }
11435
11436 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11437 uint8_t *reprocessFlags =
11438 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11440 *reprocessFlags)) {
11441 rc = BAD_VALUE;
11442 }
11443 }
11444
Thierry Strudel54dc9782017-02-15 12:12:10 -080011445 // Add exif debug data to internal metadata
11446 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11447 mm_jpeg_debug_exif_params_t *debug_params =
11448 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11449 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11450 // AE
11451 if (debug_params->ae_debug_params_valid == TRUE) {
11452 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11453 debug_params->ae_debug_params);
11454 }
11455 // AWB
11456 if (debug_params->awb_debug_params_valid == TRUE) {
11457 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11458 debug_params->awb_debug_params);
11459 }
11460 // AF
11461 if (debug_params->af_debug_params_valid == TRUE) {
11462 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11463 debug_params->af_debug_params);
11464 }
11465 // ASD
11466 if (debug_params->asd_debug_params_valid == TRUE) {
11467 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11468 debug_params->asd_debug_params);
11469 }
11470 // Stats
11471 if (debug_params->stats_debug_params_valid == TRUE) {
11472 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11473 debug_params->stats_debug_params);
11474 }
11475 // BE Stats
11476 if (debug_params->bestats_debug_params_valid == TRUE) {
11477 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11478 debug_params->bestats_debug_params);
11479 }
11480 // BHIST
11481 if (debug_params->bhist_debug_params_valid == TRUE) {
11482 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11483 debug_params->bhist_debug_params);
11484 }
11485 // 3A Tuning
11486 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11487 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11488 debug_params->q3a_tuning_debug_params);
11489 }
11490 }
11491
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011492 // Add metadata which reprocess needs
11493 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11494 cam_reprocess_info_t *repro_info =
11495 (cam_reprocess_info_t *)frame_settings.find
11496 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011497 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011498 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011499 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011500 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011501 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011502 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011503 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011504 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011505 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011506 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011507 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011508 repro_info->pipeline_flip);
11509 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11510 repro_info->af_roi);
11511 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11512 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011513 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11514 CAM_INTF_PARM_ROTATION metadata has already been added in
11515 translateToHalMetadata. HAL needs to keep this new rotation
11516 metadata. Otherwise, the old rotation info saved in the vendor tag
11517 would be used */
11518 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11519 CAM_INTF_PARM_ROTATION, reprocParam) {
11520 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11521 } else {
11522 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011523 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011524 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011525 }
11526
11527 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11528 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11529 roi.width and roi.height would be the final JPEG size.
11530 For now, HAL only checks this for reprocess requests */
11531 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11532 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11533 uint8_t *enable =
11534 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11535 if (*enable == TRUE) {
11536 int32_t *crop_data =
11537 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11538 cam_stream_crop_info_t crop_meta;
11539 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11540 crop_meta.stream_id = 0;
11541 crop_meta.crop.left = crop_data[0];
11542 crop_meta.crop.top = crop_data[1];
11543 crop_meta.crop.width = crop_data[2];
11544 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011545 // The JPEG crop roi should match cpp output size
11546 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11547 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11548 crop_meta.roi_map.left = 0;
11549 crop_meta.roi_map.top = 0;
11550 crop_meta.roi_map.width = cpp_crop->crop.width;
11551 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011552 }
11553 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11554 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011555 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011556 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011557 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11558 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011559 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011560 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11561
11562 // Add JPEG scale information
11563 cam_dimension_t scale_dim;
11564 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11565 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11566 int32_t *roi =
11567 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11568 scale_dim.width = roi[2];
11569 scale_dim.height = roi[3];
11570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11571 scale_dim);
11572 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11573 scale_dim.width, scale_dim.height, mCameraId);
11574 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011575 }
11576 }
11577
11578 return rc;
11579}
11580
11581/*===========================================================================
11582 * FUNCTION : saveRequestSettings
11583 *
11584 * DESCRIPTION: Add any settings that might have changed to the request settings
11585 * and save the settings to be applied on the frame
11586 *
11587 * PARAMETERS :
11588 * @jpegMetadata : the extracted and/or modified jpeg metadata
11589 * @request : request with initial settings
11590 *
11591 * RETURN :
11592 * camera_metadata_t* : pointer to the saved request settings
11593 *==========================================================================*/
11594camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11595 const CameraMetadata &jpegMetadata,
11596 camera3_capture_request_t *request)
11597{
11598 camera_metadata_t *resultMetadata;
11599 CameraMetadata camMetadata;
11600 camMetadata = request->settings;
11601
11602 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11603 int32_t thumbnail_size[2];
11604 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11605 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11606 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11607 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11608 }
11609
11610 if (request->input_buffer != NULL) {
11611 uint8_t reprocessFlags = 1;
11612 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11613 (uint8_t*)&reprocessFlags,
11614 sizeof(reprocessFlags));
11615 }
11616
11617 resultMetadata = camMetadata.release();
11618 return resultMetadata;
11619}
11620
11621/*===========================================================================
11622 * FUNCTION : setHalFpsRange
11623 *
11624 * DESCRIPTION: set FPS range parameter
11625 *
11626 *
11627 * PARAMETERS :
11628 * @settings : Metadata from framework
11629 * @hal_metadata: Metadata buffer
11630 *
11631 *
11632 * RETURN : success: NO_ERROR
11633 * failure:
11634 *==========================================================================*/
11635int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11636 metadata_buffer_t *hal_metadata)
11637{
11638 int32_t rc = NO_ERROR;
11639 cam_fps_range_t fps_range;
11640 fps_range.min_fps = (float)
11641 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11642 fps_range.max_fps = (float)
11643 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11644 fps_range.video_min_fps = fps_range.min_fps;
11645 fps_range.video_max_fps = fps_range.max_fps;
11646
11647 LOGD("aeTargetFpsRange fps: [%f %f]",
11648 fps_range.min_fps, fps_range.max_fps);
11649 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11650 * follows:
11651 * ---------------------------------------------------------------|
11652 * Video stream is absent in configure_streams |
11653 * (Camcorder preview before the first video record)            |
11654 * ---------------------------------------------------------------|
11655 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11656 * | | | vid_min/max_fps|
11657 * ---------------------------------------------------------------|
11658 * NO | [ 30, 240] | 240 | [240, 240] |
11659 * |-------------|-------------|----------------|
11660 * | [240, 240] | 240 | [240, 240] |
11661 * ---------------------------------------------------------------|
11662 * Video stream is present in configure_streams |
11663 * ---------------------------------------------------------------|
11664 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11665 * | | | vid_min/max_fps|
11666 * ---------------------------------------------------------------|
11667 * NO | [ 30, 240] | 240 | [240, 240] |
11668 * (camcorder prev |-------------|-------------|----------------|
11669 * after video rec | [240, 240] | 240 | [240, 240] |
11670 * is stopped) | | | |
11671 * ---------------------------------------------------------------|
11672 * YES | [ 30, 240] | 240 | [240, 240] |
11673 * |-------------|-------------|----------------|
11674 * | [240, 240] | 240 | [240, 240] |
11675 * ---------------------------------------------------------------|
11676 * When Video stream is absent in configure_streams,
11677 * preview fps = sensor_fps / batchsize
11678 * Eg: for 240fps at batchSize 4, preview = 60fps
11679 * for 120fps at batchSize 4, preview = 30fps
11680 *
11681 * When video stream is present in configure_streams, preview fps is as per
11682 * the ratio of preview buffers to video buffers requested in process
11683 * capture request
11684 */
11685 mBatchSize = 0;
11686 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
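        // Constrained HFR locks the sensor to a single high rate: collapse both the
        // preview and video fps ranges to [max_fps, max_fps], as laid out in the
        // table above.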
11687 fps_range.min_fps = fps_range.video_max_fps;
11688 fps_range.video_min_fps = fps_range.video_max_fps;
11689 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11690 fps_range.max_fps);
11691 if (NAME_NOT_FOUND != val) {
11692 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11694 return BAD_VALUE;
11695 }
11696
11697 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11698 /* If batchmode is currently in progress and the fps changes,
11699 * set the flag to restart the sensor */
11700 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11701 (mHFRVideoFps != fps_range.max_fps)) {
11702 mNeedSensorRestart = true;
11703 }
11704 mHFRVideoFps = fps_range.max_fps;
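                // Batch size = HFR rate / preview fps target (PREVIEW_FPS_FOR_HFR);
                // e.g. the 240 fps / 60 fps-preview example in the table above gives
                // a batch of 4. The result is capped at MAX_HFR_BATCH_SIZE below.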
11705 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11706 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11707 mBatchSize = MAX_HFR_BATCH_SIZE;
11708 }
11709 }
11710 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11711
11712 }
11713 } else {
11714 /* HFR mode is session param in backend/ISP. This should be reset when
11715 * in non-HFR mode */
11716 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11718 return BAD_VALUE;
11719 }
11720 }
11721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11722 return BAD_VALUE;
11723 }
11724 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11725 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11726 return rc;
11727}
11728
11729/*===========================================================================
11730 * FUNCTION : translateToHalMetadata
11731 *
11732 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11733 *
11734 *
11735 * PARAMETERS :
11736 * @request : request sent from framework
11737 *
11738 *
11739 * RETURN : success: NO_ERROR
11740 * failure:
11741 *==========================================================================*/
11742int QCamera3HardwareInterface::translateToHalMetadata
11743 (const camera3_capture_request_t *request,
11744 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011745 uint32_t snapshotStreamId) {
11746 if (request == nullptr || hal_metadata == nullptr) {
11747 return BAD_VALUE;
11748 }
11749
11750 int64_t minFrameDuration = getMinFrameDuration(request);
11751
11752 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11753 minFrameDuration);
11754}
11755
11756int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11757 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11758 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11759
Thierry Strudel3d639192016-09-09 11:52:26 -070011760 int rc = 0;
11761 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011762 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011763
11764 /* Do not change the order of the following list unless you know what you are
11765 * doing.
11766 * The order is laid out in such a way that parameters in the front of the table
11767 * may be used to override the parameters later in the table. Examples are:
11768 * 1. META_MODE should precede AEC/AWB/AF MODE
11769     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11770 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11771     * 4. Any mode should precede its corresponding settings
11772 */
11773 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11774 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11776 rc = BAD_VALUE;
11777 }
11778 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11779 if (rc != NO_ERROR) {
11780 LOGE("extractSceneMode failed");
11781 }
11782 }
11783
11784 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11785 uint8_t fwk_aeMode =
11786 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11787 uint8_t aeMode;
11788 int32_t redeye;
11789
11790 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11791 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011792 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11793 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011794 } else {
11795 aeMode = CAM_AE_MODE_ON;
11796 }
11797 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11798 redeye = 1;
11799 } else {
11800 redeye = 0;
11801 }
11802
11803 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11804 fwk_aeMode);
11805 if (NAME_NOT_FOUND != val) {
11806 int32_t flashMode = (int32_t)val;
11807 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11808 }
11809
11810 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11812 rc = BAD_VALUE;
11813 }
11814 }
11815
11816 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11817 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11818 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11819 fwk_whiteLevel);
11820 if (NAME_NOT_FOUND != val) {
11821 uint8_t whiteLevel = (uint8_t)val;
11822 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11823 rc = BAD_VALUE;
11824 }
11825 }
11826 }
11827
11828 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11829 uint8_t fwk_cacMode =
11830 frame_settings.find(
11831 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11832 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11833 fwk_cacMode);
11834 if (NAME_NOT_FOUND != val) {
11835 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11836 bool entryAvailable = FALSE;
11837             // Check whether the framework-requested CAC mode is supported by this device
11838 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11839 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11840 entryAvailable = TRUE;
11841 break;
11842 }
11843 }
11844 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11845             // If the requested mode is not supported, fall back to a device-supported mode, i.e.:
11846             //   Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH behaving the same as FAST in the ISP
11847             //   No HW ISP CAC  + only SW CAC: advertise all 3 modes, with FAST behaving the same as OFF
11848 if (entryAvailable == FALSE) {
11849 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11850 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11851 } else {
11852 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11853                         // HIGH is not supported, so fall back to FAST; the spec says the
11854                         // underlying device implementation may be the same for both modes.
11855 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11856 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11857                         // FAST is not supported either; rather than promote the request to
11858                         // HIGH (and risk an fps drop from the higher-quality mode), fall back to OFF.
11859 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11860 } else {
11861 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11862 }
11863 }
11864 }
11865 LOGD("Final cacMode is %d", cacMode);
11866 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11867 rc = BAD_VALUE;
11868 }
11869 } else {
11870 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11871 }
11872 }
11873
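    // Debug override: a non-zero persist.camera.af.infinity property forces the
    // focus mode to infinity and ignores the AF mode requested by the framework.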
Thierry Strudel2896d122017-02-23 19:18:03 -080011874 char af_value[PROPERTY_VALUE_MAX];
11875 property_get("persist.camera.af.infinity", af_value, "0");
11876
Jason Lee84ae9972017-02-24 13:24:24 -080011877 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011878 if (atoi(af_value) == 0) {
11879 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011880 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011881 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11882 fwk_focusMode);
11883 if (NAME_NOT_FOUND != val) {
11884 uint8_t focusMode = (uint8_t)val;
11885 LOGD("set focus mode %d", focusMode);
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11887 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11888 rc = BAD_VALUE;
11889 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011890 }
11891 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011892 } else {
11893 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11894 LOGE("Focus forced to infinity %d", focusMode);
11895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11896 rc = BAD_VALUE;
11897 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011898 }
11899
Jason Lee84ae9972017-02-24 13:24:24 -080011900 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11901 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011902 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11904 focalDistance)) {
11905 rc = BAD_VALUE;
11906 }
11907 }
11908
11909 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11910 uint8_t fwk_antibandingMode =
11911 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11912 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11913 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11914 if (NAME_NOT_FOUND != val) {
11915 uint32_t hal_antibandingMode = (uint32_t)val;
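            // Narrow a generic AUTO antibanding request to the region-specific
            // 60 Hz or 50 Hz auto variant, based on the m60HzZone flag.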
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011916 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11917 if (m60HzZone) {
11918 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11919 } else {
11920 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11921 }
11922 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11924 hal_antibandingMode)) {
11925 rc = BAD_VALUE;
11926 }
11927 }
11928 }
11929
11930 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11931 int32_t expCompensation = frame_settings.find(
11932 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11933 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11934 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11935 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11936 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011937 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011938 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11939 expCompensation)) {
11940 rc = BAD_VALUE;
11941 }
11942 }
11943
11944 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11945 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11947 rc = BAD_VALUE;
11948 }
11949 }
11950 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11951 rc = setHalFpsRange(frame_settings, hal_metadata);
11952 if (rc != NO_ERROR) {
11953 LOGE("setHalFpsRange failed");
11954 }
11955 }
11956
11957 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11958 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11960 rc = BAD_VALUE;
11961 }
11962 }
11963
11964 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11965 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11966 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11967 fwk_effectMode);
11968 if (NAME_NOT_FOUND != val) {
11969 uint8_t effectMode = (uint8_t)val;
11970 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11971 rc = BAD_VALUE;
11972 }
11973 }
11974 }
11975
11976 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11977 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11979 colorCorrectMode)) {
11980 rc = BAD_VALUE;
11981 }
11982 }
11983
11984 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11985 cam_color_correct_gains_t colorCorrectGains;
11986 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11987 colorCorrectGains.gains[i] =
11988 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11989 }
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11991 colorCorrectGains)) {
11992 rc = BAD_VALUE;
11993 }
11994 }
11995
11996 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11997 cam_color_correct_matrix_t colorCorrectTransform;
11998 cam_rational_type_t transform_elem;
11999 size_t num = 0;
12000 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12001 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12002 transform_elem.numerator =
12003 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12004 transform_elem.denominator =
12005 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12006 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12007 num++;
12008 }
12009 }
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12011 colorCorrectTransform)) {
12012 rc = BAD_VALUE;
12013 }
12014 }
12015
12016 cam_trigger_t aecTrigger;
12017 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12018 aecTrigger.trigger_id = -1;
12019 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12020 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12021 aecTrigger.trigger =
12022 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12023 aecTrigger.trigger_id =
12024 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12026 aecTrigger)) {
12027 rc = BAD_VALUE;
12028 }
12029 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12030 aecTrigger.trigger, aecTrigger.trigger_id);
12031 }
12032
12033 /*af_trigger must come with a trigger id*/
12034 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12035 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12036 cam_trigger_t af_trigger;
12037 af_trigger.trigger =
12038 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12039 af_trigger.trigger_id =
12040 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12042 rc = BAD_VALUE;
12043 }
12044 LOGD("AfTrigger: %d AfTriggerID: %d",
12045 af_trigger.trigger, af_trigger.trigger_id);
12046 }
12047
12048 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12049 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12051 rc = BAD_VALUE;
12052 }
12053 }
12054 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12055 cam_edge_application_t edge_application;
12056 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012057
Thierry Strudel3d639192016-09-09 11:52:26 -070012058 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12059 edge_application.sharpness = 0;
12060 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012061 edge_application.sharpness =
12062 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12063 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12064 int32_t sharpness =
12065 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12066 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12067 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12068 LOGD("Setting edge mode sharpness %d", sharpness);
12069 edge_application.sharpness = sharpness;
12070 }
12071 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012072 }
12073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12074 rc = BAD_VALUE;
12075 }
12076 }
12077
12078 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12079 int32_t respectFlashMode = 1;
12080 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12081 uint8_t fwk_aeMode =
12082 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012083 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12084 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12085 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012086 respectFlashMode = 0;
12087 LOGH("AE Mode controls flash, ignore android.flash.mode");
12088 }
12089 }
12090 if (respectFlashMode) {
12091 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12092 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12093 LOGH("flash mode after mapping %d", val);
12094 // To check: CAM_INTF_META_FLASH_MODE usage
12095 if (NAME_NOT_FOUND != val) {
12096 uint8_t flashMode = (uint8_t)val;
12097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12098 rc = BAD_VALUE;
12099 }
12100 }
12101 }
12102 }
12103
12104 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12105 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12107 rc = BAD_VALUE;
12108 }
12109 }
12110
12111 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12112 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12114 flashFiringTime)) {
12115 rc = BAD_VALUE;
12116 }
12117 }
12118
12119 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12120 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12122 hotPixelMode)) {
12123 rc = BAD_VALUE;
12124 }
12125 }
12126
12127 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12128 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12129 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12130 lensAperture)) {
12131 rc = BAD_VALUE;
12132 }
12133 }
12134
12135 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12136 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12138 filterDensity)) {
12139 rc = BAD_VALUE;
12140 }
12141 }
12142
12143 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12144 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12146 focalLength)) {
12147 rc = BAD_VALUE;
12148 }
12149 }
12150
12151 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12152 uint8_t optStabMode =
12153 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12155 optStabMode)) {
12156 rc = BAD_VALUE;
12157 }
12158 }
12159
12160 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12161 uint8_t videoStabMode =
12162 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12163 LOGD("videoStabMode from APP = %d", videoStabMode);
12164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12165 videoStabMode)) {
12166 rc = BAD_VALUE;
12167 }
12168 }
12169
12170
12171 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12172 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12173 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12174 noiseRedMode)) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178
12179 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12180 float reprocessEffectiveExposureFactor =
12181 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12183 reprocessEffectiveExposureFactor)) {
12184 rc = BAD_VALUE;
12185 }
12186 }
12187
12188 cam_crop_region_t scalerCropRegion;
12189 bool scalerCropSet = false;
12190 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12191 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12192 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12193 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12194 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12195
12196 // Map coordinate system from active array to sensor output.
12197 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12198 scalerCropRegion.width, scalerCropRegion.height);
12199
12200 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12201 scalerCropRegion)) {
12202 rc = BAD_VALUE;
12203 }
12204 scalerCropSet = true;
12205 }
12206
12207 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12208 int64_t sensorExpTime =
12209 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12210 LOGD("setting sensorExpTime %lld", sensorExpTime);
12211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12212 sensorExpTime)) {
12213 rc = BAD_VALUE;
12214 }
12215 }
12216
12217 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12218 int64_t sensorFrameDuration =
12219 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
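        // Clamp the requested duration to at least the minimum allowed for this
        // request and at most the sensor's maximum supported frame duration.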
Thierry Strudel3d639192016-09-09 11:52:26 -070012220 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12221 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12222 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12223 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12225 sensorFrameDuration)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12231 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12232 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12233 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12234 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12235 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12236 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12238 sensorSensitivity)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012243#ifndef USE_HAL_3_3
12244 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12245 int32_t ispSensitivity =
12246 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12247 if (ispSensitivity <
12248 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12249 ispSensitivity =
12250 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12251 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12252 }
12253 if (ispSensitivity >
12254 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12255 ispSensitivity =
12256 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12257 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12258 }
12259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12260 ispSensitivity)) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264#endif
12265
Thierry Strudel3d639192016-09-09 11:52:26 -070012266 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12267 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12269 rc = BAD_VALUE;
12270 }
12271 }
12272
12273 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12274 uint8_t fwk_facedetectMode =
12275 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12276
12277 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12278 fwk_facedetectMode);
12279
12280 if (NAME_NOT_FOUND != val) {
12281 uint8_t facedetectMode = (uint8_t)val;
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12283 facedetectMode)) {
12284 rc = BAD_VALUE;
12285 }
12286 }
12287 }
12288
Thierry Strudel54dc9782017-02-15 12:12:10 -080012289 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012290 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012291 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12293 histogramMode)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
12298 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12299 uint8_t sharpnessMapMode =
12300 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12302 sharpnessMapMode)) {
12303 rc = BAD_VALUE;
12304 }
12305 }
12306
12307 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12308 uint8_t tonemapMode =
12309 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12311 rc = BAD_VALUE;
12312 }
12313 }
12314 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12315 /*All tonemap channels will have the same number of points*/
12316 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12317 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12318 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12319 cam_rgb_tonemap_curves tonemapCurves;
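        // Each tonemap curve point is an (input, output) float pair, so the point
        // count is half the number of floats supplied per channel.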
12320 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12321 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12322 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12323 tonemapCurves.tonemap_points_cnt,
12324 CAM_MAX_TONEMAP_CURVE_SIZE);
12325 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12326 }
12327
12328 /* ch0 = G*/
12329 size_t point = 0;
12330 cam_tonemap_curve_t tonemapCurveGreen;
12331 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12332 for (size_t j = 0; j < 2; j++) {
12333 tonemapCurveGreen.tonemap_points[i][j] =
12334 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12335 point++;
12336 }
12337 }
12338 tonemapCurves.curves[0] = tonemapCurveGreen;
12339
12340 /* ch 1 = B */
12341 point = 0;
12342 cam_tonemap_curve_t tonemapCurveBlue;
12343 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12344 for (size_t j = 0; j < 2; j++) {
12345 tonemapCurveBlue.tonemap_points[i][j] =
12346 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12347 point++;
12348 }
12349 }
12350 tonemapCurves.curves[1] = tonemapCurveBlue;
12351
12352 /* ch 2 = R */
12353 point = 0;
12354 cam_tonemap_curve_t tonemapCurveRed;
12355 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12356 for (size_t j = 0; j < 2; j++) {
12357 tonemapCurveRed.tonemap_points[i][j] =
12358 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12359 point++;
12360 }
12361 }
12362 tonemapCurves.curves[2] = tonemapCurveRed;
12363
12364 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12365 tonemapCurves)) {
12366 rc = BAD_VALUE;
12367 }
12368 }
12369
12370 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12371 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12373 captureIntent)) {
12374 rc = BAD_VALUE;
12375 }
12376 }
12377
12378 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12379 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12380 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12381 blackLevelLock)) {
12382 rc = BAD_VALUE;
12383 }
12384 }
12385
12386 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12387 uint8_t lensShadingMapMode =
12388 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12390 lensShadingMapMode)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
12395 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12396 cam_area_t roi;
12397 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012398 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012399
12400 // Map coordinate system from active array to sensor output.
12401 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12402 roi.rect.height);
12403
12404 if (scalerCropSet) {
12405 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12406 }
12407 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12408 rc = BAD_VALUE;
12409 }
12410 }
12411
12412 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12413 cam_area_t roi;
12414 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012415 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012416
12417 // Map coordinate system from active array to sensor output.
12418 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12419 roi.rect.height);
12420
12421 if (scalerCropSet) {
12422 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12423 }
12424 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12425 rc = BAD_VALUE;
12426 }
12427 }
12428
12429 // CDS for non-HFR non-video mode
12430 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12431 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12432 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12433 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12434 LOGE("Invalid CDS mode %d!", *fwk_cds);
12435 } else {
12436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12437 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12438 rc = BAD_VALUE;
12439 }
12440 }
12441 }
12442
Thierry Strudel04e026f2016-10-10 11:27:36 -070012443 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012444 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012445 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012446 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12447 }
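    // m_bVideoHdrEnabled, when set, overrides whatever the per-request vendor tag asked for.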
12448 if (m_bVideoHdrEnabled)
12449 vhdr = CAM_VIDEO_HDR_MODE_ON;
12450
Thierry Strudel54dc9782017-02-15 12:12:10 -080012451 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12452
12453 if(vhdr != curr_hdr_state)
12454 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12455
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012456 rc = setVideoHdrMode(mParameters, vhdr);
12457 if (rc != NO_ERROR) {
12458         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012459 }
12460
12461 //IR
12462 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12463 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12464 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012465 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12466 uint8_t isIRon = 0;
12467
12468         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012469 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12470 LOGE("Invalid IR mode %d!", fwk_ir);
12471 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012472 if(isIRon != curr_ir_state )
12473 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12474
Thierry Strudel04e026f2016-10-10 11:27:36 -070012475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12476 CAM_INTF_META_IR_MODE, fwk_ir)) {
12477 rc = BAD_VALUE;
12478 }
12479 }
12480 }
12481
Thierry Strudel54dc9782017-02-15 12:12:10 -080012482 //Binning Correction Mode
12483 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12484 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12485 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12486 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12487 || (0 > fwk_binning_correction)) {
12488 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12489 } else {
12490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12491 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12492 rc = BAD_VALUE;
12493 }
12494 }
12495 }
12496
Thierry Strudel269c81a2016-10-12 12:13:59 -070012497 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12498 float aec_speed;
12499 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12500 LOGD("AEC Speed :%f", aec_speed);
12501 if ( aec_speed < 0 ) {
12502             LOGE("Invalid AEC convergence speed %f!", aec_speed);
12503 } else {
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12505 aec_speed)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509 }
12510
12511 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12512 float awb_speed;
12513 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12514 LOGD("AWB Speed :%f", awb_speed);
12515 if ( awb_speed < 0 ) {
12516             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12517 } else {
12518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12519 awb_speed)) {
12520 rc = BAD_VALUE;
12521 }
12522 }
12523 }
12524
Thierry Strudel3d639192016-09-09 11:52:26 -070012525 // TNR
12526 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12527 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12528 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012529 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012530 cam_denoise_param_t tnr;
12531 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12532 tnr.process_plates =
12533 (cam_denoise_process_type_t)frame_settings.find(
12534 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12535 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012536
12537 if(b_TnrRequested != curr_tnr_state)
12538 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12539
Thierry Strudel3d639192016-09-09 11:52:26 -070012540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544
Thierry Strudel54dc9782017-02-15 12:12:10 -080012545 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012546 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012547 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12549 *exposure_metering_mode)) {
12550 rc = BAD_VALUE;
12551 }
12552 }
12553
Thierry Strudel3d639192016-09-09 11:52:26 -070012554 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12555 int32_t fwk_testPatternMode =
12556 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12557 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12558 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12559
12560 if (NAME_NOT_FOUND != testPatternMode) {
12561 cam_test_pattern_data_t testPatternData;
12562 memset(&testPatternData, 0, sizeof(testPatternData));
12563 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12564 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12565 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12566 int32_t *fwk_testPatternData =
12567 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12568 testPatternData.r = fwk_testPatternData[0];
12569 testPatternData.b = fwk_testPatternData[3];
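                // The framework supplies the solid colour as [R, Geven, Godd, B]
                // (per android.sensor.testPatternData); map the two green samples
                // onto Gr/Gb according to this sensor's colour filter arrangement.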
12570 switch (gCamCapability[mCameraId]->color_arrangement) {
12571 case CAM_FILTER_ARRANGEMENT_RGGB:
12572 case CAM_FILTER_ARRANGEMENT_GRBG:
12573 testPatternData.gr = fwk_testPatternData[1];
12574 testPatternData.gb = fwk_testPatternData[2];
12575 break;
12576 case CAM_FILTER_ARRANGEMENT_GBRG:
12577 case CAM_FILTER_ARRANGEMENT_BGGR:
12578 testPatternData.gr = fwk_testPatternData[2];
12579 testPatternData.gb = fwk_testPatternData[1];
12580 break;
12581 default:
12582 LOGE("color arrangement %d is not supported",
12583 gCamCapability[mCameraId]->color_arrangement);
12584 break;
12585 }
12586 }
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12588 testPatternData)) {
12589 rc = BAD_VALUE;
12590 }
12591 } else {
12592 LOGE("Invalid framework sensor test pattern mode %d",
12593 fwk_testPatternMode);
12594 }
12595 }
12596
12597 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12598 size_t count = 0;
12599 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12600 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12601 gps_coords.data.d, gps_coords.count, count);
12602 if (gps_coords.count != count) {
12603 rc = BAD_VALUE;
12604 }
12605 }
12606
12607 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12608 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12609 size_t count = 0;
12610 const char *gps_methods_src = (const char *)
12611 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12612 memset(gps_methods, '\0', sizeof(gps_methods));
12613 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12614 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12615 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12616 if (GPS_PROCESSING_METHOD_SIZE != count) {
12617 rc = BAD_VALUE;
12618 }
12619 }
12620
12621 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12622 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12624 gps_timestamp)) {
12625 rc = BAD_VALUE;
12626 }
12627 }
12628
12629 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12630 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12631 cam_rotation_info_t rotation_info;
12632 if (orientation == 0) {
12633 rotation_info.rotation = ROTATE_0;
12634 } else if (orientation == 90) {
12635 rotation_info.rotation = ROTATE_90;
12636 } else if (orientation == 180) {
12637 rotation_info.rotation = ROTATE_180;
12638 } else if (orientation == 270) {
12639 rotation_info.rotation = ROTATE_270;
12640         } else { rotation_info.rotation = ROTATE_0; } // defensive default; framework only sends 0/90/180/270
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012641 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012642 rotation_info.streamId = snapshotStreamId;
12643 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12645 rc = BAD_VALUE;
12646 }
12647 }
12648
12649 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12650 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655
12656 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12657 uint32_t thumb_quality = (uint32_t)
12658 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12660 thumb_quality)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664
12665 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12666 cam_dimension_t dim;
12667 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12668 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12669 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12670 rc = BAD_VALUE;
12671 }
12672 }
12673
12674 // Internal metadata
12675 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12676 size_t count = 0;
12677 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12678 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12679 privatedata.data.i32, privatedata.count, count);
12680 if (privatedata.count != count) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012685 // ISO/Exposure Priority
12686 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12687 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12688 cam_priority_mode_t mode =
12689 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12690 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12691 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12692 use_iso_exp_pty.previewOnly = FALSE;
12693 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12694 use_iso_exp_pty.value = *ptr;
12695
12696 if(CAM_ISO_PRIORITY == mode) {
12697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12698 use_iso_exp_pty)) {
12699 rc = BAD_VALUE;
12700 }
12701 }
12702 else {
12703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12704 use_iso_exp_pty)) {
12705 rc = BAD_VALUE;
12706 }
12707 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012708
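            // ISO / exposure-time priority is applied together with ZSL: force ZSL
            // on here; when the priority tags are absent, ZSL is switched back off
            // in the else branch below.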
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12710 rc = BAD_VALUE;
12711 }
12712 }
12713 } else {
12714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12715 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012716 }
12717 }
12718
12719 // Saturation
12720 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12721 int32_t* use_saturation =
12722 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12724 rc = BAD_VALUE;
12725 }
12726 }
12727
Thierry Strudel3d639192016-09-09 11:52:26 -070012728 // EV step
12729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12730 gCamCapability[mCameraId]->exp_compensation_step)) {
12731 rc = BAD_VALUE;
12732 }
12733
12734 // CDS info
12735 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12736 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12737 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12738
12739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12740 CAM_INTF_META_CDS_DATA, *cdsData)) {
12741 rc = BAD_VALUE;
12742 }
12743 }
12744
Shuzhen Wang19463d72016-03-08 11:09:52 -080012745 // Hybrid AE
12746 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12747 uint8_t *hybrid_ae = (uint8_t *)
12748 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12749
12750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12751 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12752 rc = BAD_VALUE;
12753 }
12754 }
12755
Shuzhen Wang14415f52016-11-16 18:26:18 -080012756 // Histogram
12757 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12758 uint8_t histogramMode =
12759 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12760 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12761 histogramMode)) {
12762 rc = BAD_VALUE;
12763 }
12764 }
12765
12766 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12767 int32_t histogramBins =
12768 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12770 histogramBins)) {
12771 rc = BAD_VALUE;
12772 }
12773 }
12774
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012775 // Tracking AF
12776 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12777 uint8_t trackingAfTrigger =
12778 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12780 trackingAfTrigger)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784
Thierry Strudel3d639192016-09-09 11:52:26 -070012785 return rc;
12786}
12787
12788/*===========================================================================
12789 * FUNCTION : captureResultCb
12790 *
12791 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12792 *
12793 * PARAMETERS :
12794 * @frame : frame information from mm-camera-interface
12795 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12796 * @userdata: userdata
12797 *
12798 * RETURN : NONE
12799 *==========================================================================*/
12800void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12801 camera3_stream_buffer_t *buffer,
12802 uint32_t frame_number, bool isInputBuffer, void *userdata)
12803{
12804 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12805 if (hw == NULL) {
12806 LOGE("Invalid hw %p", hw);
12807 return;
12808 }
12809
12810 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12811 return;
12812}
12813
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012814/*===========================================================================
12815 * FUNCTION : setBufferErrorStatus
12816 *
12817 * DESCRIPTION: Callback handler for channels to report any buffer errors
12818 *
12819 * PARAMETERS :
12820 * @ch : Channel on which buffer error is reported from
12821 * @frame_number : frame number on which buffer error is reported on
12822 * @buffer_status : buffer error status
12823 * @userdata: userdata
12824 *
12825 * RETURN : NONE
12826 *==========================================================================*/
12827void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12828 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12829{
12830 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12831 if (hw == NULL) {
12832 LOGE("Invalid hw %p", hw);
12833 return;
12834 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012835
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012836 hw->setBufferErrorStatus(ch, frame_number, err);
12837 return;
12838}
12839
12840void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12841 uint32_t frameNumber, camera3_buffer_status_t err)
12842{
12843 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12844 pthread_mutex_lock(&mMutex);
12845
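    // Flag every pending buffer of this frame that belongs to the reporting
    // channel, so downstream result handling can report it as errored.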
12846 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12847 if (req.frame_number != frameNumber)
12848 continue;
12849 for (auto& k : req.mPendingBufferList) {
12850 if(k.stream->priv == ch) {
12851 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12852 }
12853 }
12854 }
12855
12856 pthread_mutex_unlock(&mMutex);
12857 return;
12858}
Thierry Strudel3d639192016-09-09 11:52:26 -070012859/*===========================================================================
12860 * FUNCTION : initialize
12861 *
12862 * DESCRIPTION: Pass framework callback pointers to HAL
12863 *
12864 * PARAMETERS :
12865 *
12866 *
12867 * RETURN : Success : 0
12868 * Failure: -ENODEV
12869 *==========================================================================*/
12870
12871int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12872 const camera3_callback_ops_t *callback_ops)
12873{
12874 LOGD("E");
12875 QCamera3HardwareInterface *hw =
12876 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12877 if (!hw) {
12878 LOGE("NULL camera device");
12879 return -ENODEV;
12880 }
12881
12882 int rc = hw->initialize(callback_ops);
12883 LOGD("X");
12884 return rc;
12885}
12886
12887/*===========================================================================
12888 * FUNCTION : configure_streams
12889 *
12890 * DESCRIPTION: Framework entry point to configure a new set of streams; forwards to configureStreams()
12891 *
12892 * PARAMETERS :
12893 *
12894 *
12895 * RETURN : Success: 0
12896 * Failure: -EINVAL (if stream configuration is invalid)
12897 * -ENODEV (fatal error)
12898 *==========================================================================*/
12899
12900int QCamera3HardwareInterface::configure_streams(
12901 const struct camera3_device *device,
12902 camera3_stream_configuration_t *stream_list)
12903{
12904 LOGD("E");
12905 QCamera3HardwareInterface *hw =
12906 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12907 if (!hw) {
12908 LOGE("NULL camera device");
12909 return -ENODEV;
12910 }
12911 int rc = hw->configureStreams(stream_list);
12912 LOGD("X");
12913 return rc;
12914}
12915
12916/*===========================================================================
12917 * FUNCTION : construct_default_request_settings
12918 *
12919 * DESCRIPTION: Configure a settings buffer to meet the required use case
12920 *
12921 * PARAMETERS :
12922 *
12923 *
12924 * RETURN : Success: Return valid metadata
12925 * Failure: Return NULL
12926 *==========================================================================*/
12927const camera_metadata_t* QCamera3HardwareInterface::
12928 construct_default_request_settings(const struct camera3_device *device,
12929 int type)
12930{
12931
12932 LOGD("E");
12933 camera_metadata_t* fwk_metadata = NULL;
12934 QCamera3HardwareInterface *hw =
12935 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12936 if (!hw) {
12937 LOGE("NULL camera device");
12938 return NULL;
12939 }
12940
12941 fwk_metadata = hw->translateCapabilityToMetadata(type);
12942
12943 LOGD("X");
12944 return fwk_metadata;
12945}
12946
12947/*===========================================================================
12948 * FUNCTION : process_capture_request
12949 *
12950 * DESCRIPTION: Framework entry point for a single capture request; forwards to orchestrateRequest()
12951 *
12952 * PARAMETERS :
12953 *
12954 *
12955 * RETURN :
12956 *==========================================================================*/
12957int QCamera3HardwareInterface::process_capture_request(
12958 const struct camera3_device *device,
12959 camera3_capture_request_t *request)
12960{
12961 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012962 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012963 QCamera3HardwareInterface *hw =
12964 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12965 if (!hw) {
12966 LOGE("NULL camera device");
12967 return -EINVAL;
12968 }
12969
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012970 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012971 LOGD("X");
12972 return rc;
12973}
12974
12975/*===========================================================================
12976 * FUNCTION : dump
12977 *
12978 * DESCRIPTION: Dump HAL debug state to the given fd; re-reads the log level property first
12979 *
12980 * PARAMETERS :
12981 *
12982 *
12983 * RETURN :
12984 *==========================================================================*/
12985
12986void QCamera3HardwareInterface::dump(
12987 const struct camera3_device *device, int fd)
12988{
12989 /* Log level property is read when "adb shell dumpsys media.camera" is
12990 called so that the log level can be controlled without restarting
12991 the media server */
12992 getLogLevel();
12993
12994 LOGD("E");
12995 QCamera3HardwareInterface *hw =
12996 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12997 if (!hw) {
12998 LOGE("NULL camera device");
12999 return;
13000 }
13001
13002 hw->dump(fd);
13003 LOGD("X");
13004 return;
13005}
13006
13007/*===========================================================================
13008 * FUNCTION : flush
13009 *
13010 * DESCRIPTION: Flush all in-flight requests; only acts on a STARTED session, restarting channels
13011 *
13012 * PARAMETERS :
13013 *
13014 *
13015 * RETURN :
13016 *==========================================================================*/
13017
13018int QCamera3HardwareInterface::flush(
13019 const struct camera3_device *device)
13020{
13021 int rc;
13022 LOGD("E");
13023 QCamera3HardwareInterface *hw =
13024 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13025 if (!hw) {
13026 LOGE("NULL camera device");
13027 return -EINVAL;
13028 }
13029
13030 pthread_mutex_lock(&hw->mMutex);
13031 // Validate current state
13032 switch (hw->mState) {
13033 case STARTED:
13034 /* valid state */
13035 break;
13036
13037 case ERROR:
13038 pthread_mutex_unlock(&hw->mMutex);
13039 hw->handleCameraDeviceError();
13040 return -ENODEV;
13041
13042 default:
13043 LOGI("Flush returned during state %d", hw->mState);
13044 pthread_mutex_unlock(&hw->mMutex);
13045 return 0;
13046 }
13047 pthread_mutex_unlock(&hw->mMutex);
13048
13049 rc = hw->flush(true /* restart channels */ );
13050 LOGD("X");
13051 return rc;
13052}
13053
13054/*===========================================================================
13055 * FUNCTION : close_camera_device
13056 *
13057 * DESCRIPTION: Close the camera device and destroy the HAL instance
13058 *
13059 * PARAMETERS :
13060 *
13061 *
13062 * RETURN :
13063 *==========================================================================*/
13064int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13065{
13066 int ret = NO_ERROR;
13067 QCamera3HardwareInterface *hw =
13068 reinterpret_cast<QCamera3HardwareInterface *>(
13069 reinterpret_cast<camera3_device_t *>(device)->priv);
13070 if (!hw) {
13071 LOGE("NULL camera device");
13072 return BAD_VALUE;
13073 }
13074
13075 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13076 delete hw;
13077 LOGI("[KPI Perf]: X");
13078 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
13079 return ret;
13080}
13081
13082/*===========================================================================
13083 * FUNCTION : getWaveletDenoiseProcessPlate
13084 *
13085 * DESCRIPTION: query wavelet denoise process plate
13086 *
13087 * PARAMETERS : None
13088 *
13089 * RETURN : WNR process plate value
13090 *==========================================================================*/
13091cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13092{
13093 char prop[PROPERTY_VALUE_MAX];
13094 memset(prop, 0, sizeof(prop));
13095 property_get("persist.denoise.process.plates", prop, "0");
13096 int processPlate = atoi(prop);
13097 switch(processPlate) {
13098 case 0:
13099 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13100 case 1:
13101 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13102 case 2:
13103 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13104 case 3:
13105 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13106 default:
13107 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13108 }
13109}
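/* Illustrative note (mapping taken from the switch above, not new behavior):
 * setting "persist.denoise.process.plates" to "1" selects
 * CAM_WAVELET_DENOISE_CBCR_ONLY, while any value outside 0-3 falls back to
 * CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. getTemporalDenoiseProcessPlate()
 * below applies the same mapping to "persist.tnr.process.plates". */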
13110
13111
13112/*===========================================================================
13113 * FUNCTION : getTemporalDenoiseProcessPlate
13114 *
13115 * DESCRIPTION: query temporal denoise process plate
13116 *
13117 * PARAMETERS : None
13118 *
13119 * RETURN : TNR process plate value
13120 *==========================================================================*/
13121cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13122{
13123 char prop[PROPERTY_VALUE_MAX];
13124 memset(prop, 0, sizeof(prop));
13125 property_get("persist.tnr.process.plates", prop, "0");
13126 int processPlate = atoi(prop);
13127 switch(processPlate) {
13128 case 0:
13129 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13130 case 1:
13131 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13132 case 2:
13133 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13134 case 3:
13135 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13136 default:
13137 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13138 }
13139}
13140
13141
13142/*===========================================================================
13143 * FUNCTION : extractSceneMode
13144 *
13145 * DESCRIPTION: Extract scene mode from frameworks set metadata
13146 *
13147 * PARAMETERS :
13148 * @frame_settings: CameraMetadata reference
13149 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13150 * @hal_metadata: hal metadata structure
13151 *
13152 * RETURN : int32_t type of status, NO_ERROR on success, error code otherwise
13153 *==========================================================================*/
13154int32_t QCamera3HardwareInterface::extractSceneMode(
13155 const CameraMetadata &frame_settings, uint8_t metaMode,
13156 metadata_buffer_t *hal_metadata)
13157{
13158 int32_t rc = NO_ERROR;
13159 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13160
13161 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13162 LOGD("Ignoring control mode OFF_KEEP_STATE");
13163 return NO_ERROR;
13164 }
13165
13166 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13167 camera_metadata_ro_entry entry =
13168 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13169 if (0 == entry.count)
13170 return rc;
13171
13172 uint8_t fwk_sceneMode = entry.data.u8[0];
13173
13174 int val = lookupHalName(SCENE_MODES_MAP,
13175 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13176 fwk_sceneMode);
13177 if (NAME_NOT_FOUND != val) {
13178 sceneMode = (uint8_t)val;
13179 LOGD("sceneMode: %d", sceneMode);
13180 }
13181 }
13182
13183 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13184 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13185 }
13186
13187 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13188 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013189 cam_hdr_param_t hdr_params;
13190 hdr_params.hdr_enable = 1;
13191 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13192 hdr_params.hdr_need_1x = false;
13193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13194 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13195 rc = BAD_VALUE;
13196 }
13197 }
13198
13199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13200 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13201 rc = BAD_VALUE;
13202 }
13203 }
13204
13205 if (mForceHdrSnapshot) {
13206 cam_hdr_param_t hdr_params;
13207 hdr_params.hdr_enable = 1;
13208 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13209 hdr_params.hdr_need_1x = false;
13210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13211 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13212 rc = BAD_VALUE;
13213 }
13214 }
13215
13216 return rc;
13217}
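/* Illustrative sketch (hypothetical framework settings, simplified from the
 * logic above): for a request with ANDROID_CONTROL_MODE == USE_SCENE_MODE and
 * ANDROID_CONTROL_SCENE_MODE == HDR,
 *   - lookupHalName(SCENE_MODES_MAP, ...) maps the framework scene mode to
 *     its CAM_SCENE_MODE_* counterpart;
 *   - setSensorHDR() is consulted first, and only if sensor HDR stays
 *     disabled are the multi-frame bracketing params
 *     (CAM_INTF_PARM_HAL_BRACKETING_HDR) and CAM_INTF_PARM_BESTSHOT_MODE
 *     added to hal_metadata. */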
13218
13219/*===========================================================================
13220 * FUNCTION : setVideoHdrMode
13221 *
13222 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13223 *
13224 * PARAMETERS :
13225 * @hal_metadata: hal metadata structure
13226 * @vhdr: requested QCAMERA3_VIDEO_HDR_MODE (cam_video_hdr_mode_t)
13227 *
13228 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13229 *==========================================================================*/
13230int32_t QCamera3HardwareInterface::setVideoHdrMode(
13231 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13232{
13233 if ((vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13234 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13235 }
13236
13237 LOGE("Invalid Video HDR mode %d!", vhdr);
13238 return BAD_VALUE;
13239}
13240
13241/*===========================================================================
13242 * FUNCTION : setSensorHDR
13243 *
13244 * DESCRIPTION: Enable/disable sensor HDR.
13245 *
13246 * PARAMETERS :
13247 * @hal_metadata: hal metadata structure
13248 * @enable: whether to enable or disable sensor HDR
13249 * @isVideoHdrEnable: true when invoked for video HDR mode
13250 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13251 *==========================================================================*/
13252int32_t QCamera3HardwareInterface::setSensorHDR(
13253 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13254{
13255 int32_t rc = NO_ERROR;
13256 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13257
13258 if (enable) {
13259 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13260 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13261 #ifdef _LE_CAMERA_
13262 //Default to staggered HDR for IOT
13263 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13264 #else
13265 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13266 #endif
13267 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13268 }
13269
13270 bool isSupported = false;
13271 switch (sensor_hdr) {
13272 case CAM_SENSOR_HDR_IN_SENSOR:
13273 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13274 CAM_QCOM_FEATURE_SENSOR_HDR) {
13275 isSupported = true;
13276 LOGD("Setting HDR mode In Sensor");
13277 }
13278 break;
13279 case CAM_SENSOR_HDR_ZIGZAG:
13280 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13281 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13282 isSupported = true;
13283 LOGD("Setting HDR mode Zigzag");
13284 }
13285 break;
13286 case CAM_SENSOR_HDR_STAGGERED:
13287 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13288 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13289 isSupported = true;
13290 LOGD("Setting HDR mode Staggered");
13291 }
13292 break;
13293 case CAM_SENSOR_HDR_OFF:
13294 isSupported = true;
13295 LOGD("Turning off sensor HDR");
13296 break;
13297 default:
13298 LOGE("HDR mode %d not supported", sensor_hdr);
13299 rc = BAD_VALUE;
13300 break;
13301 }
13302
13303 if(isSupported) {
13304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13305 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13306 rc = BAD_VALUE;
13307 } else {
13308 if(!isVideoHdrEnable)
13309 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
13310 }
13311 }
13312 return rc;
13313}
13314
13315/*===========================================================================
13316 * FUNCTION : needRotationReprocess
13317 *
13318 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13319 *
13320 * PARAMETERS : none
13321 *
13322 * RETURN : true: needed
13323 * false: no need
13324 *==========================================================================*/
13325bool QCamera3HardwareInterface::needRotationReprocess()
13326{
13327 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13328 // pp has the capability to process rotation, so rotation is handled via reprocess
13329 LOGH("need do reprocess for rotation");
13330 return true;
13331 }
13332
13333 return false;
13334}
13335
13336/*===========================================================================
13337 * FUNCTION : needReprocess
13338 *
13339 * DESCRIPTION: if reprocess is needed
13340 *
13341 * PARAMETERS : none
13342 *
13343 * RETURN : true: needed
13344 * false: no need
13345 *==========================================================================*/
13346bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13347{
13348 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13349 // TODO: add for ZSL HDR later
13350 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13351 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13352 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13353 return true;
13354 } else {
13355 LOGH("already post processed frame");
13356 return false;
13357 }
13358 }
13359 return needRotationReprocess();
13360}
13361
13362/*===========================================================================
13363 * FUNCTION : needJpegExifRotation
13364 *
13365 * DESCRIPTION: if rotation from jpeg is needed
13366 *
13367 * PARAMETERS : none
13368 *
13369 * RETURN : true: needed
13370 * false: no need
13371 *==========================================================================*/
13372bool QCamera3HardwareInterface::needJpegExifRotation()
13373{
13374 /* If the pp does not have the ability to do rotation, enable jpeg rotation */
13375 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13376 LOGD("Need use Jpeg EXIF Rotation");
13377 return true;
13378 }
13379 return false;
13380}
13381
13382/*===========================================================================
13383 * FUNCTION : addOfflineReprocChannel
13384 *
13385 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13386 * coming from input channel
13387 *
13388 * PARAMETERS :
13389 * @config : reprocess configuration
13390 * @inputChHandle : pointer to the input (source) channel
13391 *
13392 *
13393 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13394 *==========================================================================*/
13395QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13396 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13397{
13398 int32_t rc = NO_ERROR;
13399 QCamera3ReprocessChannel *pChannel = NULL;
13400
13401 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
13402 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13403 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
13404 if (NULL == pChannel) {
13405 LOGE("no mem for reprocess channel");
13406 return NULL;
13407 }
13408
13409 rc = pChannel->initialize(IS_TYPE_NONE);
13410 if (rc != NO_ERROR) {
13411 LOGE("init reprocess channel failed, ret = %d", rc);
13412 delete pChannel;
13413 return NULL;
13414 }
13415
13416 // pp feature config
13417 cam_pp_feature_config_t pp_config;
13418 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13419
13420 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13421 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13422 & CAM_QCOM_FEATURE_DSDN) {
13423 // Use CPP CDS in case h/w supports it.
13424 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13425 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13426 }
13427 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13428 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13429 }
13430
13431 if (config.hdr_param.hdr_enable) {
13432 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13433 pp_config.hdr_param = config.hdr_param;
13434 }
13435
13436 if (mForceHdrSnapshot) {
13437 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13438 pp_config.hdr_param.hdr_enable = 1;
13439 pp_config.hdr_param.hdr_need_1x = 0;
13440 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13441 }
13442
13443 rc = pChannel->addReprocStreamsFromSource(pp_config,
13444 config,
13445 IS_TYPE_NONE,
13446 mMetadataChannel);
13447
13448 if (rc != NO_ERROR) {
13449 delete pChannel;
13450 return NULL;
13451 }
13452 return pChannel;
13453}
13454
13455/*===========================================================================
13456 * FUNCTION : getMobicatMask
13457 *
13458 * DESCRIPTION: returns mobicat mask
13459 *
13460 * PARAMETERS : none
13461 *
13462 * RETURN : mobicat mask
13463 *
13464 *==========================================================================*/
13465uint8_t QCamera3HardwareInterface::getMobicatMask()
13466{
13467 return m_MobicatMask;
13468}
13469
13470/*===========================================================================
13471 * FUNCTION : setMobicat
13472 *
13473 * DESCRIPTION: set Mobicat on/off.
13474 *
13475 * PARAMETERS :
13476 * @params : none
13477 *
13478 * RETURN : int32_t type of status
13479 * NO_ERROR -- success
13480 * non-zero failure code
13481 *==========================================================================*/
13482int32_t QCamera3HardwareInterface::setMobicat()
13483{
13484 char value [PROPERTY_VALUE_MAX];
13485 property_get("persist.camera.mobicat", value, "0");
13486 int32_t ret = NO_ERROR;
13487 uint8_t enableMobi = (uint8_t)atoi(value);
13488
13489 if (enableMobi) {
13490 tune_cmd_t tune_cmd;
13491 tune_cmd.type = SET_RELOAD_CHROMATIX;
13492 tune_cmd.module = MODULE_ALL;
13493 tune_cmd.value = TRUE;
13494 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13495 CAM_INTF_PARM_SET_VFE_COMMAND,
13496 tune_cmd);
13497
13498 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13499 CAM_INTF_PARM_SET_PP_COMMAND,
13500 tune_cmd);
13501 }
13502 m_MobicatMask = enableMobi;
13503
13504 return ret;
13505}
13506
13507/*===========================================================================
13508* FUNCTION : getLogLevel
13509*
13510* DESCRIPTION: Reads the log level property into a variable
13511*
13512* PARAMETERS :
13513* None
13514*
13515* RETURN :
13516* None
13517*==========================================================================*/
13518void QCamera3HardwareInterface::getLogLevel()
13519{
13520 char prop[PROPERTY_VALUE_MAX];
13521 uint32_t globalLogLevel = 0;
13522
13523 property_get("persist.camera.hal.debug", prop, "0");
13524 int val = atoi(prop);
13525 if (0 <= val) {
13526 gCamHal3LogLevel = (uint32_t)val;
13527 }
13528
13529 property_get("persist.camera.kpi.debug", prop, "0");
13530 gKpiDebugLevel = atoi(prop);
13531
13532 property_get("persist.camera.global.debug", prop, "0");
13533 val = atoi(prop);
13534 if (0 <= val) {
13535 globalLogLevel = (uint32_t)val;
13536 }
13537
13538 /* Highest log level among hal.logs and global.logs is selected */
13539 if (gCamHal3LogLevel < globalLogLevel)
13540 gCamHal3LogLevel = globalLogLevel;
13541
13542 return;
13543}
13544
13545/*===========================================================================
13546 * FUNCTION : validateStreamRotations
13547 *
13548 * DESCRIPTION: Check if the rotations requested are supported
13549 *
13550 * PARAMETERS :
13551 * @stream_list : streams to be configured
13552 *
13553 * RETURN : NO_ERROR on success
13554 * -EINVAL on failure
13555 *
13556 *==========================================================================*/
13557int QCamera3HardwareInterface::validateStreamRotations(
13558 camera3_stream_configuration_t *streamList)
13559{
13560 int rc = NO_ERROR;
13561
13562 /*
13563 * Loop through all streams requested in configuration
13564 * Check if unsupported rotations have been requested on any of them
13565 */
13566 for (size_t j = 0; j < streamList->num_streams; j++){
13567 camera3_stream_t *newStream = streamList->streams[j];
13568
13569 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13570 bool isImplDef = (newStream->format ==
13571 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13572 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13573 isImplDef);
13574
13575 if (isRotated && (!isImplDef || isZsl)) {
13576 LOGE("Error: Unsupported rotation of %d requested for stream"
13577 "type:%d and stream format:%d",
13578 newStream->rotation, newStream->stream_type,
13579 newStream->format);
13580 rc = -EINVAL;
13581 break;
13582 }
13583 }
13584
13585 return rc;
13586}
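/* Illustrative example (hypothetical stream, not part of the HAL): a BLOB
 * stream requesting CAMERA3_STREAM_ROTATION_90 would be rejected by the
 * check above, because rotation is only honored on non-ZSL
 * implementation-defined streams:
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_OUTPUT;
 *   s.format      = HAL_PIXEL_FORMAT_BLOB;
 *   s.rotation    = CAMERA3_STREAM_ROTATION_90;
 *   // validateStreamRotations() returns -EINVAL for a config containing s.
 */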
13587
13588/*===========================================================================
13589* FUNCTION : getFlashInfo
13590*
13591* DESCRIPTION: Retrieve information about whether the device has a flash.
13592*
13593* PARAMETERS :
13594* @cameraId : Camera id to query
13595* @hasFlash : Boolean indicating whether there is a flash device
13596* associated with given camera
13597* @flashNode : If a flash device exists, this will be its device node.
13598*
13599* RETURN :
13600* None
13601*==========================================================================*/
13602void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13603 bool& hasFlash,
13604 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13605{
13606 cam_capability_t* camCapability = gCamCapability[cameraId];
13607 if (NULL == camCapability) {
13608 hasFlash = false;
13609 flashNode[0] = '\0';
13610 } else {
13611 hasFlash = camCapability->flash_available;
13612 strlcpy(flashNode,
13613 (char*)camCapability->flash_dev_name,
13614 QCAMERA_MAX_FILEPATH_LENGTH);
13615 }
13616}
13617
13618/*===========================================================================
13619* FUNCTION : getEepromVersionInfo
13620*
13621* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13622*
13623* PARAMETERS : None
13624*
13625* RETURN : string describing EEPROM version
13626* "\0" if no such info available
13627*==========================================================================*/
13628const char *QCamera3HardwareInterface::getEepromVersionInfo()
13629{
13630 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13631}
13632
13633/*===========================================================================
13634* FUNCTION : getLdafCalib
13635*
13636* DESCRIPTION: Retrieve Laser AF calibration data
13637*
13638* PARAMETERS : None
13639*
13640* RETURN : Two uint32_t describing laser AF calibration data
13641* NULL if none is available.
13642*==========================================================================*/
13643const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13644{
13645 if (mLdafCalibExist) {
13646 return &mLdafCalib[0];
13647 } else {
13648 return NULL;
13649 }
13650}
13651
13652/*===========================================================================
13653 * FUNCTION : dynamicUpdateMetaStreamInfo
13654 *
13655 * DESCRIPTION: This function:
13656 * (1) stops all the channels
13657 * (2) returns error on pending requests and buffers
13658 * (3) sends metastream_info in setparams
13659 * (4) starts all channels
13660 * This is useful when sensor has to be restarted to apply any
13661 * settings such as frame rate from a different sensor mode
13662 *
13663 * PARAMETERS : None
13664 *
13665 * RETURN : NO_ERROR on success
13666 * Error codes on failure
13667 *
13668 *==========================================================================*/
13669int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13670{
13671 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
13672 int rc = NO_ERROR;
13673
13674 LOGD("E");
13675
13676 rc = stopAllChannels();
13677 if (rc < 0) {
13678 LOGE("stopAllChannels failed");
13679 return rc;
13680 }
13681
13682 rc = notifyErrorForPendingRequests();
13683 if (rc < 0) {
13684 LOGE("notifyErrorForPendingRequests failed");
13685 return rc;
13686 }
13687
13688 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13689 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13690 "Format:%d",
13691 mStreamConfigInfo.type[i],
13692 mStreamConfigInfo.stream_sizes[i].width,
13693 mStreamConfigInfo.stream_sizes[i].height,
13694 mStreamConfigInfo.postprocess_mask[i],
13695 mStreamConfigInfo.format[i]);
13696 }
13697
13698 /* Send meta stream info once again so that ISP can start */
13699 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13700 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13701 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13702 mParameters);
13703 if (rc < 0) {
13704 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13705 }
13706
13707 rc = startAllChannels();
13708 if (rc < 0) {
13709 LOGE("startAllChannels failed");
13710 return rc;
13711 }
13712
13713 LOGD("X");
13714 return rc;
13715}
13716
13717/*===========================================================================
13718 * FUNCTION : stopAllChannels
13719 *
13720 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13721 *
13722 * PARAMETERS : None
13723 *
13724 * RETURN : NO_ERROR on success
13725 * Error codes on failure
13726 *
13727 *==========================================================================*/
13728int32_t QCamera3HardwareInterface::stopAllChannels()
13729{
13730 int32_t rc = NO_ERROR;
13731
13732 LOGD("Stopping all channels");
13733 // Stop the Streams/Channels
13734 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13735 it != mStreamInfo.end(); it++) {
13736 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13737 if (channel) {
13738 channel->stop();
13739 }
13740 (*it)->status = INVALID;
13741 }
13742
13743 if (mSupportChannel) {
13744 mSupportChannel->stop();
13745 }
13746 if (mAnalysisChannel) {
13747 mAnalysisChannel->stop();
13748 }
13749 if (mRawDumpChannel) {
13750 mRawDumpChannel->stop();
13751 }
13752 if (mHdrPlusRawSrcChannel) {
13753 mHdrPlusRawSrcChannel->stop();
13754 }
13755 if (mMetadataChannel) {
13756 /* If content of mStreamInfo is not 0, there is metadata stream */
13757 mMetadataChannel->stop();
13758 }
13759
13760 LOGD("All channels stopped");
13761 return rc;
13762}
13763
13764/*===========================================================================
13765 * FUNCTION : startAllChannels
13766 *
13767 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13768 *
13769 * PARAMETERS : None
13770 *
13771 * RETURN : NO_ERROR on success
13772 * Error codes on failure
13773 *
13774 *==========================================================================*/
13775int32_t QCamera3HardwareInterface::startAllChannels()
13776{
13777 int32_t rc = NO_ERROR;
13778
13779 LOGD("Start all channels ");
13780 // Start the Streams/Channels
13781 if (mMetadataChannel) {
13782 /* If content of mStreamInfo is not 0, there is metadata stream */
13783 rc = mMetadataChannel->start();
13784 if (rc < 0) {
13785 LOGE("META channel start failed");
13786 return rc;
13787 }
13788 }
13789 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13790 it != mStreamInfo.end(); it++) {
13791 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13792 if (channel) {
13793 rc = channel->start();
13794 if (rc < 0) {
13795 LOGE("channel start failed");
13796 return rc;
13797 }
13798 }
13799 }
13800 if (mAnalysisChannel) {
13801 mAnalysisChannel->start();
13802 }
13803 if (mSupportChannel) {
13804 rc = mSupportChannel->start();
13805 if (rc < 0) {
13806 LOGE("Support channel start failed");
13807 return rc;
13808 }
13809 }
13810 if (mRawDumpChannel) {
13811 rc = mRawDumpChannel->start();
13812 if (rc < 0) {
13813 LOGE("RAW dump channel start failed");
13814 return rc;
13815 }
13816 }
13817 if (mHdrPlusRawSrcChannel) {
13818 rc = mHdrPlusRawSrcChannel->start();
13819 if (rc < 0) {
13820 LOGE("HDR+ RAW channel start failed");
13821 return rc;
13822 }
13823 }
13824
13825 LOGD("All channels started");
13826 return rc;
13827}
13828
13829/*===========================================================================
13830 * FUNCTION : notifyErrorForPendingRequests
13831 *
13832 * DESCRIPTION: This function sends error for all the pending requests/buffers
13833 *
13834 * PARAMETERS : None
13835 *
13836 * RETURN : Error codes
13837 * NO_ERROR on success
13838 *
13839 *==========================================================================*/
13840int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13841{
13842 notifyErrorFoPendingDepthData(mDepthChannel);
13843
13844 auto pendingRequest = mPendingRequestsList.begin();
13845 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13846
13847 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13848 // buffers (for which buffers aren't sent yet).
13849 while (pendingRequest != mPendingRequestsList.end() ||
13850 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13851 if (pendingRequest == mPendingRequestsList.end() ||
13852 pendingBuffer->frame_number < pendingRequest->frame_number) {
13853 // If metadata for this frame was sent, notify about a buffer error and return buffers
13854 // with error.
13855 for (auto &info : pendingBuffer->mPendingBufferList) {
13856 // Send a buffer error for this frame number.
13857 camera3_notify_msg_t notify_msg;
13858 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13859 notify_msg.type = CAMERA3_MSG_ERROR;
13860 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13861 notify_msg.message.error.error_stream = info.stream;
13862 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13863 orchestrateNotify(&notify_msg);
13864
13865 camera3_stream_buffer_t buffer = {};
13866 buffer.acquire_fence = -1;
13867 buffer.release_fence = -1;
13868 buffer.buffer = info.buffer;
13869 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13870 buffer.stream = info.stream;
13871 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013872 }
13873
13874 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13875 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13876 pendingBuffer->frame_number > pendingRequest->frame_number) {
13877 // If the buffers for this frame were sent already, notify about a result error.
13878 camera3_notify_msg_t notify_msg;
13879 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13880 notify_msg.type = CAMERA3_MSG_ERROR;
13881 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13882 notify_msg.message.error.error_stream = nullptr;
13883 notify_msg.message.error.frame_number = pendingRequest->frame_number;
13884 orchestrateNotify(&notify_msg);
13885
13886 if (pendingRequest->input_buffer != nullptr) {
13887 camera3_capture_result result = {};
13888 result.frame_number = pendingRequest->frame_number;
13889 result.result = nullptr;
13890 result.input_buffer = pendingRequest->input_buffer;
13891 orchestrateResult(&result);
13892 }
13893
13894 mShutterDispatcher.clear(pendingRequest->frame_number);
13895 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13896 } else {
13897 // If both buffers and result metadata weren't sent yet, notify about a request error
13898 // and return buffers with error.
13899 for (auto &info : pendingBuffer->mPendingBufferList) {
13900 camera3_notify_msg_t notify_msg;
13901 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13902 notify_msg.type = CAMERA3_MSG_ERROR;
13903 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13904 notify_msg.message.error.error_stream = info.stream;
13905 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13906 orchestrateNotify(&notify_msg);
13907
13908 camera3_stream_buffer_t buffer = {};
13909 buffer.acquire_fence = -1;
13910 buffer.release_fence = -1;
13911 buffer.buffer = info.buffer;
13912 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13913 buffer.stream = info.stream;
13914 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13915 }
13916
13917 if (pendingRequest->input_buffer != nullptr) {
13918 camera3_capture_result result = {};
13919 result.frame_number = pendingRequest->frame_number;
13920 result.result = nullptr;
13921 result.input_buffer = pendingRequest->input_buffer;
13922 orchestrateResult(&result);
13923 }
13924
13925 mShutterDispatcher.clear(pendingRequest->frame_number);
13926 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13927 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13928 }
13929 }
13930
13931 /* Reset pending frame Drop list and requests list */
13932 mPendingFrameDropList.clear();
13933 mShutterDispatcher.clear();
13934 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
13935 mPendingBuffersMap.mPendingBuffersInRequest.clear();
13936 LOGH("Cleared all the pending buffers ");
13937
13938 return NO_ERROR;
13939}
13940
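/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Check whether a stream of the given dimensions has to go
 *              through the encoder path, i.e. it exceeds either the maximum
 *              viewfinder size or the 4K video dimensions.
 *
 * PARAMETERS :
 *   @max_viewfinder_size : maximum viewfinder dimensions
 *   @width               : stream width
 *   @height              : stream height
 *
 * RETURN     : true if the stream must be placed on the encoder path
 *==========================================================================*/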
13941bool QCamera3HardwareInterface::isOnEncoder(
13942 const cam_dimension_t max_viewfinder_size,
13943 uint32_t width, uint32_t height)
13944{
13945 return ((width > (uint32_t)max_viewfinder_size.width) ||
13946 (height > (uint32_t)max_viewfinder_size.height) ||
13947 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13948 (height > (uint32_t)VIDEO_4K_HEIGHT));
13949}
13950
13951/*===========================================================================
13952 * FUNCTION : setBundleInfo
13953 *
13954 * DESCRIPTION: Set bundle info for all streams that are bundle.
13955 *
13956 * PARAMETERS : None
13957 *
13958 * RETURN : NO_ERROR on success
13959 * Error codes on failure
13960 *==========================================================================*/
13961int32_t QCamera3HardwareInterface::setBundleInfo()
13962{
13963 int32_t rc = NO_ERROR;
13964
13965 if (mChannelHandle) {
13966 cam_bundle_config_t bundleInfo;
13967 memset(&bundleInfo, 0, sizeof(bundleInfo));
13968 rc = mCameraHandle->ops->get_bundle_info(
13969 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13970 if (rc != NO_ERROR) {
13971 LOGE("get_bundle_info failed");
13972 return rc;
13973 }
13974 if (mAnalysisChannel) {
13975 mAnalysisChannel->setBundleInfo(bundleInfo);
13976 }
13977 if (mSupportChannel) {
13978 mSupportChannel->setBundleInfo(bundleInfo);
13979 }
13980 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13981 it != mStreamInfo.end(); it++) {
13982 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13983 channel->setBundleInfo(bundleInfo);
13984 }
13985 if (mRawDumpChannel) {
13986 mRawDumpChannel->setBundleInfo(bundleInfo);
13987 }
13988 if (mHdrPlusRawSrcChannel) {
13989 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13990 }
13991 }
13992
13993 return rc;
13994}
13995
13996/*===========================================================================
13997 * FUNCTION : setInstantAEC
13998 *
13999 * DESCRIPTION: Set Instant AEC related params.
14000 *
14001 * PARAMETERS :
14002 * @meta: CameraMetadata reference
14003 *
14004 * RETURN : NO_ERROR on success
14005 * Error codes on failure
14006 *==========================================================================*/
14007int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14008{
14009 int32_t rc = NO_ERROR;
14010 uint8_t val = 0;
14011 char prop[PROPERTY_VALUE_MAX];
14012
14013 // First try to configure instant AEC from framework metadata
14014 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14015 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14016 }
14017
14018 // If framework did not set this value, try to read from set prop.
14019 if (val == 0) {
14020 memset(prop, 0, sizeof(prop));
14021 property_get("persist.camera.instant.aec", prop, "0");
14022 val = (uint8_t)atoi(prop);
14023 }
14024
14025 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14026 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14027 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14028 mInstantAEC = val;
14029 mInstantAECSettledFrameNumber = 0;
14030 mInstantAecFrameIdxCount = 0;
14031 LOGH("instantAEC value set %d",val);
14032 if (mInstantAEC) {
14033 memset(prop, 0, sizeof(prop));
14034 property_get("persist.camera.ae.instant.bound", prop, "10");
14035 int32_t aec_frame_skip_cnt = atoi(prop);
14036 if (aec_frame_skip_cnt >= 0) {
14037 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14038 } else {
14039 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14040 rc = BAD_VALUE;
14041 }
14042 }
14043 } else {
14044 LOGE("Bad instant aec value set %d", val);
14045 rc = BAD_VALUE;
14046 }
14047 return rc;
14048}
14049
14050/*===========================================================================
14051 * FUNCTION : get_num_overall_buffers
14052 *
14053 * DESCRIPTION: Estimate number of pending buffers across all requests.
14054 *
14055 * PARAMETERS : None
14056 *
14057 * RETURN : Number of overall pending buffers
14058 *
14059 *==========================================================================*/
14060uint32_t PendingBuffersMap::get_num_overall_buffers()
14061{
14062 uint32_t sum_buffers = 0;
14063 for (auto &req : mPendingBuffersInRequest) {
14064 sum_buffers += req.mPendingBufferList.size();
14065 }
14066 return sum_buffers;
14067}
14068
14069/*===========================================================================
14070 * FUNCTION : removeBuf
14071 *
14072 * DESCRIPTION: Remove a matching buffer from tracker.
14073 *
14074 * PARAMETERS : @buffer: image buffer for the callback
14075 *
14076 * RETURN : None
14077 *
14078 *==========================================================================*/
14079void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14080{
14081 bool buffer_found = false;
14082 for (auto req = mPendingBuffersInRequest.begin();
14083 req != mPendingBuffersInRequest.end(); req++) {
14084 for (auto k = req->mPendingBufferList.begin();
14085 k != req->mPendingBufferList.end(); k++ ) {
14086 if (k->buffer == buffer) {
14087 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14088 req->frame_number, buffer);
14089 k = req->mPendingBufferList.erase(k);
14090 if (req->mPendingBufferList.empty()) {
14091 // Remove this request from Map
14092 req = mPendingBuffersInRequest.erase(req);
14093 }
14094 buffer_found = true;
14095 break;
14096 }
14097 }
14098 if (buffer_found) {
14099 break;
14100 }
14101 }
14102 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14103 get_num_overall_buffers());
14104}
14105
14106/*===========================================================================
14107 * FUNCTION : getBufErrStatus
14108 *
14109 * DESCRIPTION: get buffer error status
14110 *
14111 * PARAMETERS : @buffer: buffer handle
14112 *
14113 * RETURN : Error status
14114 *
14115 *==========================================================================*/
14116int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14117{
14118 for (auto& req : mPendingBuffersInRequest) {
14119 for (auto& k : req.mPendingBufferList) {
14120 if (k.buffer == buffer)
14121 return k.bufStatus;
14122 }
14123 }
14124 return CAMERA3_BUFFER_STATUS_OK;
14125}
14126
14127/*===========================================================================
14128 * FUNCTION : setPAAFSupport
14129 *
14130 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14131 * feature mask according to stream type and filter
14132 * arrangement
14133 *
14134 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14135 * @stream_type: stream type
14136 * @filter_arrangement: filter arrangement
14137 *
14138 * RETURN : None
14139 *==========================================================================*/
14140void QCamera3HardwareInterface::setPAAFSupport(
14141 cam_feature_mask_t& feature_mask,
14142 cam_stream_type_t stream_type,
14143 cam_color_filter_arrangement_t filter_arrangement)
14144{
14145 switch (filter_arrangement) {
14146 case CAM_FILTER_ARRANGEMENT_RGGB:
14147 case CAM_FILTER_ARRANGEMENT_GRBG:
14148 case CAM_FILTER_ARRANGEMENT_GBRG:
14149 case CAM_FILTER_ARRANGEMENT_BGGR:
14150 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14151 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
14152 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
14153 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14154 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14155 }
14156 break;
14157 case CAM_FILTER_ARRANGEMENT_Y:
14158 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14159 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14160 }
14161 break;
14162 default:
14163 break;
14164 }
14165 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14166 feature_mask, stream_type, filter_arrangement);
14167
14168
14169}
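/* Illustrative sketch (assumed call pattern, simplified from stream
 * configuration): the per-stream feature mask is adjusted in place before it
 * is sent to the backend, e.g.
 *
 *   cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *           gCamCapability[mCameraId]->color_arrangement);
 *   // For a Bayer sensor, CAM_QCOM_FEATURE_PAAF is now set unless
 *   // CAM_QTI_FEATURE_PPEISCORE was already present in the mask.
 */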
14170
14171/*===========================================================================
14172* FUNCTION : getSensorMountAngle
14173*
14174* DESCRIPTION: Retrieve sensor mount angle
14175*
14176* PARAMETERS : None
14177*
14178* RETURN : sensor mount angle in uint32_t
14179*==========================================================================*/
14180uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14181{
14182 return gCamCapability[mCameraId]->sensor_mount_angle;
14183}
14184
14185/*===========================================================================
14186* FUNCTION : getRelatedCalibrationData
14187*
14188* DESCRIPTION: Retrieve related system calibration data
14189*
14190* PARAMETERS : None
14191*
14192* RETURN : Pointer of related system calibration data
14193*==========================================================================*/
14194const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14195{
14196 return (const cam_related_system_calibration_data_t *)
14197 &(gCamCapability[mCameraId]->related_cam_calibration);
14198}
14199
14200/*===========================================================================
14201 * FUNCTION : is60HzZone
14202 *
14203 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14204 *
14205 * PARAMETERS : None
14206 *
14207 * RETURN : True if in 60Hz zone, False otherwise
14208 *==========================================================================*/
14209bool QCamera3HardwareInterface::is60HzZone()
14210{
14211 time_t t = time(NULL);
14212 struct tm lt;
14213
14214 struct tm* r = localtime_r(&t, &lt);
14215
14216 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14217 return true;
14218 else
14219 return false;
14220}
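/* Note on the heuristic above: a UTC offset at or below -2h (roughly the
 * Americas) or at or above +8h (roughly East Asia and Oceania) is treated as
 * a 60Hz region, anything in between defaults to 50Hz, and a failing
 * localtime_r() falls back to 60Hz. This is a coarse approximation, not a
 * per-country lookup. */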
14221
14222/*===========================================================================
14223 * FUNCTION : adjustBlackLevelForCFA
14224 *
14225 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14226 * of bayer CFA (Color Filter Array).
14227 *
14228 * PARAMETERS : @input: black level pattern in the order of RGGB
14229 * @output: black level pattern in the order of CFA
14230 * @color_arrangement: CFA color arrangement
14231 *
14232 * RETURN : None
14233 *==========================================================================*/
14234template<typename T>
14235void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14236 T input[BLACK_LEVEL_PATTERN_CNT],
14237 T output[BLACK_LEVEL_PATTERN_CNT],
14238 cam_color_filter_arrangement_t color_arrangement)
14239{
14240 switch (color_arrangement) {
14241 case CAM_FILTER_ARRANGEMENT_GRBG:
14242 output[0] = input[1];
14243 output[1] = input[0];
14244 output[2] = input[3];
14245 output[3] = input[2];
14246 break;
14247 case CAM_FILTER_ARRANGEMENT_GBRG:
14248 output[0] = input[2];
14249 output[1] = input[3];
14250 output[2] = input[0];
14251 output[3] = input[1];
14252 break;
14253 case CAM_FILTER_ARRANGEMENT_BGGR:
14254 output[0] = input[3];
14255 output[1] = input[2];
14256 output[2] = input[1];
14257 output[3] = input[0];
14258 break;
14259 case CAM_FILTER_ARRANGEMENT_RGGB:
14260 output[0] = input[0];
14261 output[1] = input[1];
14262 output[2] = input[2];
14263 output[3] = input[3];
14264 break;
14265 default:
14266 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14267 break;
14268 }
14269}
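/* Worked example (values are hypothetical): for a GRBG sensor, an
 * RGGB-ordered black level pattern {R, Gr, Gb, B} = {64, 65, 66, 67} is
 * reordered by the GRBG case above to {65, 64, 67, 66}, i.e. Gr, R, B, Gb,
 * matching the CFA readout order:
 *
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = {64, 65, 66, 67};
 *   float out[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // out == {65, 64, 67, 66}
 */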
14270
14271void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14272 CameraMetadata &resultMetadata,
14273 std::shared_ptr<metadata_buffer_t> settings)
14274{
14275 if (settings == nullptr) {
14276 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14277 return;
14278 }
14279
14280 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14281 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14282 }
14283
14284 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14285 String8 str((const char *)gps_methods);
14286 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14287 }
14288
14289 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14290 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14291 }
14292
14293 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14294 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14295 }
14296
14297 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14298 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14299 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14300 }
14301
14302 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14303 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14304 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14305 }
14306
14307 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14308 int32_t fwk_thumb_size[2];
14309 fwk_thumb_size[0] = thumb_size->width;
14310 fwk_thumb_size[1] = thumb_size->height;
14311 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14312 }
14313
14314 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14315 uint8_t fwk_intent = intent[0];
14316 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14317 }
14318}
14319
14320bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14321 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14322 const CameraMetadata &metadata)
14323{
14324 if (hdrPlusRequest == nullptr) return false;
14325
14326 // Check noise reduction mode is high quality.
14327 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14328 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14329 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14330 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14331 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
14332 return false;
14333 }
14334
14335 // Check edge mode is high quality.
14336 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14337 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14338 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14339 return false;
14340 }
14341
14342 if (request.num_output_buffers != 1 ||
14343 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14344 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
14345 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14346 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14347 request.output_buffers[i].stream->width,
14348 request.output_buffers[i].stream->height,
14349 request.output_buffers[i].stream->format);
14350 }
14351 return false;
14352 }
14353
14354 // Get a YUV buffer from pic channel.
14355 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14356 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14357 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14358 if (res != OK) {
14359 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14360 __FUNCTION__, strerror(-res), res);
14361 return false;
14362 }
14363
14364 pbcamera::StreamBuffer buffer;
14365 buffer.streamId = kPbYuvOutputStreamId;
14366 buffer.dmaBufFd = yuvBuffer->fd;
14367 buffer.data = yuvBuffer->buffer;
14368 buffer.dataSize = yuvBuffer->frame_len;
14369
14370 pbcamera::CaptureRequest pbRequest;
14371 pbRequest.id = request.frame_number;
14372 pbRequest.outputBuffers.push_back(buffer);
14373
14374 // Submit an HDR+ capture request to HDR+ service.
14375 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
14376 if (res != OK) {
14377 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14378 strerror(-res), res);
14379 return false;
14380 }
14381
14382 hdrPlusRequest->yuvBuffer = yuvBuffer;
14383 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14384
14385 return true;
14386}
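// Summary of the checks above: a request is only routed to HDR+ when noise
// reduction and edge modes are both HIGH_QUALITY, exactly one BLOB (JPEG)
// output buffer is attached, a YUV buffer could be reserved from the pic
// channel, and the capture request was accepted by the HDR+ service.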
14387
14388status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14389{
14390 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14391 return OK;
14392 }
14393
14394 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14395 if (res != OK) {
14396 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14397 strerror(-res), res);
14398 return res;
14399 }
14400 gHdrPlusClientOpening = true;
14401
14402 return OK;
14403}
14404
14405status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14406{
14407 status_t res;
14408
14409 if (mHdrPlusModeEnabled) {
14410 return OK;
14411 }
14412
14413 // Check if gHdrPlusClient is opened or being opened.
14414 if (gHdrPlusClient == nullptr) {
14415 if (gHdrPlusClientOpening) {
14416 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14417 return OK;
14418 }
14419
14420 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014421 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014422 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14423 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014424 return res;
14425 }
14426
14427 // When opening HDR+ client completes, HDR+ mode will be enabled.
14428 return OK;
14429
14430 }
14431
14432 // Configure stream for HDR+.
14433 res = configureHdrPlusStreamsLocked();
14434 if (res != OK) {
14435 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14436 return res;
14437 }
14438
14439 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14440 res = gHdrPlusClient->setZslHdrPlusMode(true);
14441 if (res != OK) {
14442 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14443 return res;
14444 }
14445
14446 mHdrPlusModeEnabled = true;
14447 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14448
14449 return OK;
14450}
14451
14452void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14453{
14454 // Disable HDR+ mode.
14455 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14456 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14457 if (res != OK) {
14458 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14459 }
14460
14461 // Close HDR+ client so Easel can enter low power mode.
14462 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14463 gHdrPlusClient = nullptr;
14464 }
14465
14466 mHdrPlusModeEnabled = false;
14467 gHdrPlusClientOpening = false;
14468 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14469}
14470
14471status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014472{
14473 pbcamera::InputConfiguration inputConfig;
14474 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14475 status_t res = OK;
14476
14477 // Configure HDR+ client streams.
14478 // Get input config.
14479 if (mHdrPlusRawSrcChannel) {
14480 // HDR+ input buffers will be provided by HAL.
14481 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14482 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14483 if (res != OK) {
14484 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14485 __FUNCTION__, strerror(-res), res);
14486 return res;
14487 }
14488
14489 inputConfig.isSensorInput = false;
14490 } else {
14491 // Sensor MIPI will send data to Easel.
14492 inputConfig.isSensorInput = true;
14493 inputConfig.sensorMode.cameraId = mCameraId;
14494 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14495 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14496 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14497 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14498 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14499 if (mSensorModeInfo.num_raw_bits != 10) {
14500 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14501 mSensorModeInfo.num_raw_bits);
14502 return BAD_VALUE;
14503 }
14504
14505 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
14506 }
14507
14508 // Get output configurations.
14509 // Easel may need to output RAW16 buffers if mRawChannel was created.
14510 // TODO: handle RAW16 outputs.
14511
14512 // Easel may need to output YUV output buffers if mPictureChannel was created.
14513 pbcamera::StreamConfiguration yuvOutputConfig;
14514 if (mPictureChannel != nullptr) {
14515 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14516 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14517 if (res != OK) {
14518 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14519 __FUNCTION__, strerror(-res), res);
14520
14521 return res;
14522 }
14523
14524 outputStreamConfigs.push_back(yuvOutputConfig);
14525 }
14526
14527 // TODO: consider other channels for YUV output buffers.
14528
14529 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14530 if (res != OK) {
14531 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14532 strerror(-res), res);
14533 return res;
14534 }
14535
14536 return OK;
14537}
14538
14539void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14540{
14541 if (client == nullptr) {
14542 ALOGE("%s: Opened client is null.", __FUNCTION__);
14543 return;
14544 }
14545
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014546 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014547 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14548
14549 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014550 if (!gHdrPlusClientOpening) {
14551 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14552 return;
14553 }
14554
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014555 gHdrPlusClient = std::move(client);
14556 gHdrPlusClientOpening = false;
14557
14558 // Set static metadata.
14559 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14560 if (res != OK) {
14561 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14562 __FUNCTION__, strerror(-res), res);
14563 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14564 gHdrPlusClient = nullptr;
14565 return;
14566 }
14567
14568 // Enable HDR+ mode.
14569 res = enableHdrPlusModeLocked();
14570 if (res != OK) {
14571 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14572 }
14573}
14574
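// Callback invoked when opening the HDR+ client asynchronously fails. Only the
// "opening" flag is reset here, so HDR+ simply stays disabled.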
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

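// Callback invoked when the HDR+ client hits an unrecoverable error: move the
// HAL into the ERROR state and report a device error to the framework.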
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

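// Callback invoked when the HDR+ client delivers a completed capture result.
// The single YUV output buffer is handed back to the pic channel for JPEG
// encoding, the shutter is dispatched using the sensor timestamp from the result
// metadata, and the updated result metadata is sent to the framework.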
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Cannot find a pending HDR+ request for request %d.", __FUNCTION__,
                    result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump the YUV buffer to a PPM file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                    __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the YUV buffer for JPEG
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
            halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Failed to translate framework metadata to HAL metadata: %s (%d).",
                __FUNCTION__, strerror(-res), res);
        }

        // Find the sensor timestamp.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

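// Callback invoked when an HDR+ capture request fails. The YUV buffer is returned
// to the pic channel, every pending output buffer of that frame is reported as
// CAMERA3_BUFFER_STATUS_ERROR, and the corresponding pending request is dropped.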
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Cannot find a pending HDR+ request for request %d.", __FUNCTION__,
                failedResult->requestId);
            return;
        }

        // Return the buffer to pic channel.
        QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
        picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

        mHdrPlusPendingRequests.erase(pendingRequest);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out an error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out the result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove the pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

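// ShutterDispatcher keeps shutter notifications ordered by frame number. Typical
// flow: expectShutter(frameNumber) is called when a request is queued, and
// markShutterReady(frameNumber, timestamp) once the frame's sensor timestamp is
// known; shutters are then delivered to the framework strictly in frame-number
// order, holding back later frames until all earlier ones are ready.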
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.emplace(frameNumber, Shutter());
}

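// Marks the shutter for frameNumber as ready, then flushes ready shutters from
// the head of the map until the first frame whose shutter is still pending.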
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Make this frame's shutter ready.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        // Shutter was already sent.
        return;
    }

    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send out shutters until the first one that's not ready yet.
    shutter = mShutters.begin();
    while (shutter != mShutters.end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = mShutters.erase(shutter);
    }
}

void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRIu64,
            __FUNCTION__, shutter.first, shutter.second.ready,
            shutter.second.timestamp);
    }
    mShutters.clear();
}

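// OutputBufferDispatcher plays the same role for output buffers: for each
// configured stream it tracks one pending buffer slot per frame number and
// returns buffers to the framework in frame-number order within that stream.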
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

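// Resets the dispatcher for a new configuration, creating an empty
// frame-number -> buffer map for every stream in streamList.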
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

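// Registers an unready buffer slot for frameNumber on the given stream; the slot
// is filled later by markBufferReady().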
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

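// Marks the buffer for frameNumber on this stream as ready, then sends ready
// buffers from the head of the stream's queue, each in its own capture result,
// stopping at the first frame that is not ready yet.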
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send out buffers until the first one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with this buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera