/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT  ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE   (120)
#define PREVIEW_FPS_FOR_HFR      (30)
#define DEFAULT_VIDEO_FPS        (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE       (8)
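// Illustrative note (not normative): with the rates above, a 120 fps HFR
// request paired with the 30 fps preview rate typically works out to batches
// of 120 / 30 = 4 frames per request, bounded by MAX_HFR_BATCH_SIZE.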
#define REGIONS_TUPLE_COUNT 5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT         5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
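// Usage sketch: METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number of
// entries in that table, so the mapping tables below can be handed to lookup
// helpers as (table, METADATA_MAP_SIZE(table)) pairs.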

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
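// Illustrative access pattern (mirrors what openCamera()/closeCamera() below
// actually do): take gHdrPlusClientLock before touching any of the Easel
// globals above, e.g.
//
//     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
//     if (gEaselManagerClient != nullptr &&
//             gEaselManagerClient->isEaselPresentOnDevice()) {
//         // Safe to inspect gHdrPlusClient / gHdrPlusClientOpening here.
//     }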


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,              CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,             CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,     CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,      CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,         CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,  CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,         CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,            CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
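// Flattened (width, height) pairs; the leading {0, 0} advertises that
// thumbnail generation can be disabled.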

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * lookup traverses from lower to higher index, so for HAL values that map to multiple
 * Android values the first match wins (see the illustrative lookup sketch after this
 * table).
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
361const QCamera3HardwareInterface::QCameraMap<
362 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
363 { 60, CAM_HFR_MODE_60FPS},
364 { 90, CAM_HFR_MODE_90FPS},
365 { 120, CAM_HFR_MODE_120FPS},
366 { 150, CAM_HFR_MODE_150FPS},
367 { 180, CAM_HFR_MODE_180FPS},
368 { 210, CAM_HFR_MODE_210FPS},
369 { 240, CAM_HFR_MODE_240FPS},
370 { 480, CAM_HFR_MODE_480FPS},
371};
372
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700373const QCamera3HardwareInterface::QCameraMap<
374 qcamera3_ext_instant_aec_mode_t,
375 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
376 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
377 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
378 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
379};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800380
381const QCamera3HardwareInterface::QCameraMap<
382 qcamera3_ext_exposure_meter_mode_t,
383 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
384 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
385 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
386 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
387 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
388 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
389 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
390 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
391};
392
393const QCamera3HardwareInterface::QCameraMap<
394 qcamera3_ext_iso_mode_t,
395 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
396 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
397 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
398 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
399 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
400 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
401 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
402 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
403 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
404};
405
Thierry Strudel3d639192016-09-09 11:52:26 -0700406camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
407 .initialize = QCamera3HardwareInterface::initialize,
408 .configure_streams = QCamera3HardwareInterface::configure_streams,
409 .register_stream_buffers = NULL,
410 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
411 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
412 .get_metadata_vendor_tag_ops = NULL,
413 .dump = QCamera3HardwareInterface::dump,
414 .flush = QCamera3HardwareInterface::flush,
415 .reserved = {0},
416};
417
418// initialise to some default value
419uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
420
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700421static inline void logEaselEvent(const char *tag, const char *event) {
422 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
423 struct timespec ts = {};
424 static int64_t kMsPerSec = 1000;
425 static int64_t kNsPerMs = 1000000;
426 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
427 if (res != OK) {
428 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
429 } else {
430 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
431 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
432 }
433 }
434}
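// Example call site (see openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// When gEaselProfilingEnabled is false the call is a no-op.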

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
785/*===========================================================================
786 * FUNCTION : erasePendingRequest
787 *
788 * DESCRIPTION: function to erase a desired pending request after freeing any
789 * allocated memory
790 *
791 * PARAMETERS :
792 * @i : iterator pointing to pending request to be erased
793 *
794 * RETURN : iterator pointing to the next request
795 *==========================================================================*/
796QCamera3HardwareInterface::pendingRequestIterator
797 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
798{
799 if (i->input_buffer != NULL) {
800 free(i->input_buffer);
801 i->input_buffer = NULL;
802 }
803 if (i->settings != NULL)
804 free_camera_metadata((camera_metadata_t*)i->settings);
Emilian Peev30522a12017-08-03 14:36:33 +0100805
806 mExpectedInflightDuration -= i->expectedFrameDuration;
807 if (mExpectedInflightDuration < 0) {
808 LOGE("Negative expected in-flight duration!");
809 mExpectedInflightDuration = 0;
810 }
811
Thierry Strudel3d639192016-09-09 11:52:26 -0700812 return mPendingRequestsList.erase(i);
813}
814
815/*===========================================================================
816 * FUNCTION : camEvtHandle
817 *
818 * DESCRIPTION: Function registered to mm-camera-interface to handle events
819 *
820 * PARAMETERS :
821 * @camera_handle : interface layer camera handle
822 * @evt : ptr to event
823 * @user_data : user data ptr
824 *
825 * RETURN : none
826 *==========================================================================*/
827void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
828 mm_camera_event_t *evt,
829 void *user_data)
830{
831 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
832 if (obj && evt) {
833 switch(evt->server_event_type) {
834 case CAM_EVENT_TYPE_DAEMON_DIED:
835 pthread_mutex_lock(&obj->mMutex);
836 obj->mState = ERROR;
837 pthread_mutex_unlock(&obj->mMutex);
838 LOGE("Fatal, camera daemon died");
839 break;
840
841 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
842 LOGD("HAL got request pull from Daemon");
843 pthread_mutex_lock(&obj->mMutex);
844 obj->mWokenUpByDaemon = true;
845 obj->unblockRequestIfNecessary();
846 pthread_mutex_unlock(&obj->mMutex);
847 break;
848
849 default:
850 LOGW("Warning: Unhandled event %d",
851 evt->server_event_type);
852 break;
853 }
854 } else {
855 LOGE("NULL user_data/evt");
856 }
857}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, there is no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
1188
1189/*===========================================================================
1190 * FUNCTION : validateStreamDimensions
1191 *
1192 * DESCRIPTION: Check if the configuration requested are those advertised
1193 *
1194 * PARAMETERS :
1195 * @stream_list : streams to be configured
1196 *
1197 * RETURN :
1198 *
1199 *==========================================================================*/
1200int QCamera3HardwareInterface::validateStreamDimensions(
1201 camera3_stream_configuration_t *streamList)
1202{
1203 int rc = NO_ERROR;
1204 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001205 uint32_t depthWidth = 0;
1206 uint32_t depthHeight = 0;
1207 if (mPDSupported) {
1208 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1209 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1210 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001211
1212 camera3_stream_t *inputStream = NULL;
1213 /*
1214 * Loop through all streams to find input stream if it exists*
1215 */
1216 for (size_t i = 0; i< streamList->num_streams; i++) {
1217 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1218 if (inputStream != NULL) {
1219 LOGE("Error, Multiple input streams requested");
1220 return -EINVAL;
1221 }
1222 inputStream = streamList->streams[i];
1223 }
1224 }
1225 /*
1226 * Loop through all streams requested in configuration
1227 * Check if unsupported sizes have been requested on any of them
1228 */
1229 for (size_t j = 0; j < streamList->num_streams; j++) {
1230 bool sizeFound = false;
1231 camera3_stream_t *newStream = streamList->streams[j];
1232
1233 uint32_t rotatedHeight = newStream->height;
1234 uint32_t rotatedWidth = newStream->width;
1235 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1236 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1237 rotatedHeight = newStream->width;
1238 rotatedWidth = newStream->height;
1239 }
1240
1241 /*
1242 * Sizes are different for each type of stream format check against
1243 * appropriate table.
1244 */
1245 switch (newStream->format) {
1246 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1247 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1248 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001249 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1250 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1251 mPDSupported) {
1252 if ((depthWidth == newStream->width) &&
1253 (depthHeight == newStream->height)) {
1254 sizeFound = true;
1255 }
1256 break;
1257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001258 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1259 for (size_t i = 0; i < count; i++) {
1260 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1261 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1262 sizeFound = true;
1263 break;
1264 }
1265 }
1266 break;
1267 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001268 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1269 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001270 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001271 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001272 if ((depthSamplesCount == newStream->width) &&
1273 (1 == newStream->height)) {
1274 sizeFound = true;
1275 }
1276 break;
1277 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001278 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1279 /* Verify set size against generated sizes table */
1280 for (size_t i = 0; i < count; i++) {
1281 if (((int32_t)rotatedWidth ==
1282 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1283 ((int32_t)rotatedHeight ==
1284 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1285 sizeFound = true;
1286 break;
1287 }
1288 }
1289 break;
1290 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1291 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1292 default:
1293 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1294 || newStream->stream_type == CAMERA3_STREAM_INPUT
1295 || IS_USAGE_ZSL(newStream->usage)) {
1296 if (((int32_t)rotatedWidth ==
1297 gCamCapability[mCameraId]->active_array_size.width) &&
1298 ((int32_t)rotatedHeight ==
1299 gCamCapability[mCameraId]->active_array_size.height)) {
1300 sizeFound = true;
1301 break;
1302 }
1303                     /* We could potentially break here to enforce that a ZSL stream
1304                      * set by the framework is always the full active array size,
1305                      * but it is not clear from the spec whether the framework will
1306                      * always follow that. We also have logic to override to the
1307                      * full array size, so keep the check lenient for the moment.
1308                      */
1309 }
1310 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1311 MAX_SIZES_CNT);
1312 for (size_t i = 0; i < count; i++) {
1313 if (((int32_t)rotatedWidth ==
1314 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1315 ((int32_t)rotatedHeight ==
1316 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1317 sizeFound = true;
1318 break;
1319 }
1320 }
1321 break;
1322 } /* End of switch(newStream->format) */
1323
1324 /* We error out even if a single stream has unsupported size set */
1325 if (!sizeFound) {
1326 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1327 rotatedWidth, rotatedHeight, newStream->format,
1328 gCamCapability[mCameraId]->active_array_size.width,
1329 gCamCapability[mCameraId]->active_array_size.height);
1330 rc = -EINVAL;
1331 break;
1332 }
1333 } /* End of for each stream */
1334 return rc;
1335}
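/*
 * Illustrative note (not part of the original code): the rotation handling
 * above validates portrait streams against the sensor's landscape size
 * tables. For example, a hypothetical 1080x1920 output requested with
 * CAMERA3_STREAM_ROTATION_90 is compared as 1920x1080, so it passes as long
 * as 1920x1080 appears in picture_sizes_tbl (or raw_dim for RAW formats).
 */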
1336
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001337/*===========================================================================
1338 * FUNCTION : validateUsageFlags
1339 *
1340 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1341 *
1342 * PARAMETERS :
1343 * @stream_list : streams to be configured
1344 *
1345 * RETURN :
1346 * NO_ERROR if the usage flags are supported
1347 * error code if usage flags are not supported
1348 *
1349 *==========================================================================*/
1350int QCamera3HardwareInterface::validateUsageFlags(
1351 const camera3_stream_configuration_t* streamList)
1352{
1353 for (size_t j = 0; j < streamList->num_streams; j++) {
1354 const camera3_stream_t *newStream = streamList->streams[j];
1355
1356 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1357 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1358 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1359 continue;
1360 }
1361
Jason Leec4cf5032017-05-24 18:31:41 -07001362 // Here we only care whether it's EIS3 or not
1363 char is_type_value[PROPERTY_VALUE_MAX];
1364 property_get("persist.camera.is_type", is_type_value, "4");
1365 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1366 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1367 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1368 isType = IS_TYPE_NONE;
1369
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001370 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1371 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1372 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1373 bool forcePreviewUBWC = true;
1374 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1375 forcePreviewUBWC = false;
1376 }
1377 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001382 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001383
1384 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1385 // So color spaces will always match.
1386
1387 // Check whether underlying formats of shared streams match.
1388 if (isVideo && isPreview && videoFormat != previewFormat) {
1389 LOGE("Combined video and preview usage flag is not supported");
1390 return -EINVAL;
1391 }
1392 if (isPreview && isZSL && previewFormat != zslFormat) {
1393 LOGE("Combined preview and zsl usage flag is not supported");
1394 return -EINVAL;
1395 }
1396 if (isVideo && isZSL && videoFormat != zslFormat) {
1397 LOGE("Combined video and zsl usage flag is not supported");
1398 return -EINVAL;
1399 }
1400 }
1401 return NO_ERROR;
1402}
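/*
 * Illustrative example (not from the original code; it assumes the
 * IS_USAGE_VIDEO/IS_USAGE_PREVIEW macros key off the video-encoder and
 * preview/texture gralloc usage bits): a single IMPLEMENTATION_DEFINED
 * output stream requested with both video and preview usage is rejected by
 * validateUsageFlags() whenever getStreamDefaultFormat() resolves
 * CAM_STREAM_TYPE_VIDEO and CAM_STREAM_TYPE_PREVIEW to different underlying
 * formats (e.g. UBWC vs. linear YUV on some targets).
 */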
1403
1404/*===========================================================================
1405 * FUNCTION : validateUsageFlagsForEis
1406 *
1407 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1408 *
1409 * PARAMETERS :
1410 * @stream_list : streams to be configured
1411 *
1412 * RETURN :
1413 * NO_ERROR if the usage flags are supported
1414 * error code if usage flags are not supported
1415 *
1416 *==========================================================================*/
1417int QCamera3HardwareInterface::validateUsageFlagsForEis(
1418 const camera3_stream_configuration_t* streamList)
1419{
1420 for (size_t j = 0; j < streamList->num_streams; j++) {
1421 const camera3_stream_t *newStream = streamList->streams[j];
1422
1423 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1424 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1425
1426        // Because EIS is "hard-coded" for certain use cases, and the current
1427        // implementation doesn't support sharing preview and video on the same
1428        // stream, return failure if EIS is forced on.
1429 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1430 LOGE("Combined video and preview usage flag is not supported due to EIS");
1431 return -EINVAL;
1432 }
1433 }
1434 return NO_ERROR;
1435}
1436
Thierry Strudel3d639192016-09-09 11:52:26 -07001437/*==============================================================================
1438 * FUNCTION : isSupportChannelNeeded
1439 *
1440 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1441 *
1442 * PARAMETERS :
1443 * @stream_list : streams to be configured
1444 * @stream_config_info : the config info for streams to be configured
1445 *
1446 * RETURN : Boolean true/false decision
1447 *
1448 *==========================================================================*/
1449bool QCamera3HardwareInterface::isSupportChannelNeeded(
1450 camera3_stream_configuration_t *streamList,
1451 cam_stream_size_info_t stream_config_info)
1452{
1453 uint32_t i;
1454 bool pprocRequested = false;
1455 /* Check for conditions where PProc pipeline does not have any streams*/
1456 for (i = 0; i < stream_config_info.num_streams; i++) {
1457 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1458 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1459 pprocRequested = true;
1460 break;
1461 }
1462 }
1463
1464 if (pprocRequested == false )
1465 return true;
1466
1467 /* Dummy stream needed if only raw or jpeg streams present */
1468 for (i = 0; i < streamList->num_streams; i++) {
1469 switch(streamList->streams[i]->format) {
1470 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1471 case HAL_PIXEL_FORMAT_RAW10:
1472 case HAL_PIXEL_FORMAT_RAW16:
1473 case HAL_PIXEL_FORMAT_BLOB:
1474 break;
1475 default:
1476 return false;
1477 }
1478 }
1479 return true;
1480}
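/*
 * Illustrative example (assumption, not from the original code): a
 * configuration containing only a BLOB (JPEG) stream returns true, since
 * JPEG sits on the stall path and a dummy support channel is needed to keep
 * the PProc pipeline populated. A typical preview + JPEG configuration
 * returns false because the preview stream already feeds PProc.
 */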
1481
1482/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 *
1487 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001488 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 *
1490 * RETURN : int32_t type of status
1491 * NO_ERROR -- success
1492 *              non-zero failure code
1493 *
1494 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001495int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001496{
1497 int32_t rc = NO_ERROR;
1498
1499 cam_dimension_t max_dim = {0, 0};
1500 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1501 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1502 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1503 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1504 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1505 }
1506
1507 clear_metadata_buffer(mParameters);
1508
1509 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1510 max_dim);
1511 if (rc != NO_ERROR) {
1512 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1513 return rc;
1514 }
1515
1516 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001523 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001524
1525 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1526 mParameters);
1527 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001528 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001529 return rc;
1530 }
1531
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001532 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001533 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1534 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1535 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1536 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1537 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001538
1539 return rc;
1540}
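/*
 * Minimal usage sketch (illustrative only; it assumes mStreamConfigInfo has
 * already been filled in, as configureStreamsPerfLocked() does before
 * calling this):
 *
 *     cam_sensor_mode_info_t modeInfo;
 *     memset(&modeInfo, 0, sizeof(modeInfo));
 *     if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *         LOGD("selected sensor mode pixel clock: %u", modeInfo.op_pixel_clk);
 *     }
 */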
1541
1542/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001543 * FUNCTION : getCurrentSensorModeInfo
1544 *
1545 * DESCRIPTION: Get sensor mode information that is currently selected.
1546 *
1547 * PARAMETERS :
1548 * @sensorModeInfo : sensor mode information (output)
1549 *
1550 * RETURN : int32_t type of status
1551 * NO_ERROR -- success
1552 *              non-zero failure code
1553 *
1554 *==========================================================================*/
1555int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1556{
1557 int32_t rc = NO_ERROR;
1558
1559 clear_metadata_buffer(mParameters);
1560 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1561
1562 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1563 mParameters);
1564 if (rc != NO_ERROR) {
1565        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1566 return rc;
1567 }
1568
1569 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1570 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1571 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1572 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1573 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1574 sensorModeInfo.num_raw_bits);
1575
1576 return rc;
1577}
1578
1579/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001580 * FUNCTION : addToPPFeatureMask
1581 *
1582 * DESCRIPTION: add additional features to pp feature mask based on
1583 * stream type and usecase
1584 *
1585 * PARAMETERS :
1586 * @stream_format : stream type for feature mask
1587 * @stream_idx : stream idx within postprocess_mask list to change
1588 *
1589 * RETURN : NULL
1590 *
1591 *==========================================================================*/
1592void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1593 uint32_t stream_idx)
1594{
1595 char feature_mask_value[PROPERTY_VALUE_MAX];
1596 cam_feature_mask_t feature_mask;
1597 int args_converted;
1598 int property_len;
1599
1600 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001601#ifdef _LE_CAMERA_
1602 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1603 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1604 property_len = property_get("persist.camera.hal3.feature",
1605 feature_mask_value, swtnr_feature_mask_value);
1606#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001607 property_len = property_get("persist.camera.hal3.feature",
1608 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001609#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001610 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1611 (feature_mask_value[1] == 'x')) {
1612 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1613 } else {
1614 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1615 }
1616 if (1 != args_converted) {
1617 feature_mask = 0;
1618 LOGE("Wrong feature mask %s", feature_mask_value);
1619 return;
1620 }
1621
1622 switch (stream_format) {
1623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1624 /* Add LLVD to pp feature mask only if video hint is enabled */
1625 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1626 mStreamConfigInfo.postprocess_mask[stream_idx]
1627 |= CAM_QTI_FEATURE_SW_TNR;
1628 LOGH("Added SW TNR to pp feature mask");
1629 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1630 mStreamConfigInfo.postprocess_mask[stream_idx]
1631 |= CAM_QCOM_FEATURE_LLVD;
1632 LOGH("Added LLVD SeeMore to pp feature mask");
1633 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001634 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1635 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1637 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001638 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1639 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1640 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1641 CAM_QTI_FEATURE_BINNING_CORRECTION;
1642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001643 break;
1644 }
1645 default:
1646 break;
1647 }
1648 LOGD("PP feature mask %llx",
1649 mStreamConfigInfo.postprocess_mask[stream_idx]);
1650}
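/*
 * Illustrative note (the numeric values below are arbitrary examples, not
 * the real feature-bit definitions): persist.camera.hal3.feature accepts
 * either a hex value prefixed with "0x" or a plain decimal value, e.g.
 *
 *     adb shell setprop persist.camera.hal3.feature 0x2000
 *     adb shell setprop persist.camera.hal3.feature 8192
 *
 * Whether a bit such as CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD
 * actually takes effect still depends on the video-hint checks above.
 */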
1651
1652/*==============================================================================
1653 * FUNCTION : updateFpsInPreviewBuffer
1654 *
1655 * DESCRIPTION: update FPS information in preview buffer.
1656 *
1657 * PARAMETERS :
1658 * @metadata : pointer to metadata buffer
1659 * @frame_number: frame_number to look for in pending buffer list
1660 *
1661 * RETURN : None
1662 *
1663 *==========================================================================*/
1664void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1665 uint32_t frame_number)
1666{
1667 // Mark all pending buffers for this particular request
1668 // with corresponding framerate information
1669 for (List<PendingBuffersInRequest>::iterator req =
1670 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1671 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1672 for(List<PendingBufferInfo>::iterator j =
1673 req->mPendingBufferList.begin();
1674 j != req->mPendingBufferList.end(); j++) {
1675 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1676 if ((req->frame_number == frame_number) &&
1677 (channel->getStreamTypeMask() &
1678 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1679 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1680 CAM_INTF_PARM_FPS_RANGE, metadata) {
1681 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1682 struct private_handle_t *priv_handle =
1683 (struct private_handle_t *)(*(j->buffer));
1684 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1685 }
1686 }
1687 }
1688 }
1689}
1690
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001691/*==============================================================================
1692 * FUNCTION : updateTimeStampInPendingBuffers
1693 *
1694 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1695 * of a frame number
1696 *
1697 * PARAMETERS :
1698 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1699 * @timestamp : timestamp to be set
1700 *
1701 * RETURN : None
1702 *
1703 *==========================================================================*/
1704void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1705 uint32_t frameNumber, nsecs_t timestamp)
1706{
1707 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1708 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1709 if (req->frame_number != frameNumber)
1710 continue;
1711
1712 for (auto k = req->mPendingBufferList.begin();
1713 k != req->mPendingBufferList.end(); k++ ) {
1714 struct private_handle_t *priv_handle =
1715 (struct private_handle_t *) (*(k->buffer));
1716 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1717 }
1718 }
1719 return;
1720}
1721
Thierry Strudel3d639192016-09-09 11:52:26 -07001722/*===========================================================================
1723 * FUNCTION : configureStreams
1724 *
1725 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1726 * and output streams.
1727 *
1728 * PARAMETERS :
1729 * @stream_list : streams to be configured
1730 *
1731 * RETURN :
1732 *
1733 *==========================================================================*/
1734int QCamera3HardwareInterface::configureStreams(
1735 camera3_stream_configuration_t *streamList)
1736{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001737 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001738 int rc = 0;
1739
1740 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001741 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001742 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001743 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001744
1745 return rc;
1746}
1747
1748/*===========================================================================
1749 * FUNCTION : configureStreamsPerfLocked
1750 *
1751 * DESCRIPTION: configureStreams while perfLock is held.
1752 *
1753 * PARAMETERS :
1754 * @stream_list : streams to be configured
1755 *
1756 * RETURN : int32_t type of status
1757 * NO_ERROR -- success
1758 * none-zero failure code
1759 *==========================================================================*/
1760int QCamera3HardwareInterface::configureStreamsPerfLocked(
1761 camera3_stream_configuration_t *streamList)
1762{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001763 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001764 int rc = 0;
1765
1766 // Sanity check stream_list
1767 if (streamList == NULL) {
1768 LOGE("NULL stream configuration");
1769 return BAD_VALUE;
1770 }
1771 if (streamList->streams == NULL) {
1772 LOGE("NULL stream list");
1773 return BAD_VALUE;
1774 }
1775
1776 if (streamList->num_streams < 1) {
1777 LOGE("Bad number of streams requested: %d",
1778 streamList->num_streams);
1779 return BAD_VALUE;
1780 }
1781
1782 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1783 LOGE("Maximum number of streams %d exceeded: %d",
1784 MAX_NUM_STREAMS, streamList->num_streams);
1785 return BAD_VALUE;
1786 }
1787
Jason Leec4cf5032017-05-24 18:31:41 -07001788 mOpMode = streamList->operation_mode;
1789 LOGD("mOpMode: %d", mOpMode);
1790
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001791 rc = validateUsageFlags(streamList);
1792 if (rc != NO_ERROR) {
1793 return rc;
1794 }
1795
Thierry Strudel3d639192016-09-09 11:52:26 -07001796    /* First invalidate all the streams in mStreamInfo;
1797     * if they appear again, they will be validated */
1798 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1799 it != mStreamInfo.end(); it++) {
1800 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1801 if (channel) {
1802 channel->stop();
1803 }
1804 (*it)->status = INVALID;
1805 }
1806
1807 if (mRawDumpChannel) {
1808 mRawDumpChannel->stop();
1809 delete mRawDumpChannel;
1810 mRawDumpChannel = NULL;
1811 }
1812
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001813 if (mHdrPlusRawSrcChannel) {
1814 mHdrPlusRawSrcChannel->stop();
1815 delete mHdrPlusRawSrcChannel;
1816 mHdrPlusRawSrcChannel = NULL;
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 if (mSupportChannel)
1820 mSupportChannel->stop();
1821
1822 if (mAnalysisChannel) {
1823 mAnalysisChannel->stop();
1824 }
1825 if (mMetadataChannel) {
1826 /* If content of mStreamInfo is not 0, there is metadata stream */
1827        /* If mStreamInfo is not empty, there is a metadata stream */
1828 }
1829 if (mChannelHandle) {
1830 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001831 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001832 LOGD("stopping channel %d", mChannelHandle);
1833 }
1834
1835 pthread_mutex_lock(&mMutex);
1836
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001837 mPictureChannel = NULL;
1838
Thierry Strudel3d639192016-09-09 11:52:26 -07001839 // Check state
1840 switch (mState) {
1841 case INITIALIZED:
1842 case CONFIGURED:
1843 case STARTED:
1844 /* valid state */
1845 break;
1846 default:
1847 LOGE("Invalid state %d", mState);
1848 pthread_mutex_unlock(&mMutex);
1849 return -ENODEV;
1850 }
1851
1852 /* Check whether we have video stream */
1853 m_bIs4KVideo = false;
1854 m_bIsVideo = false;
1855 m_bEisSupportedSize = false;
1856 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001857 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001858 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001859 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001860 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 uint32_t videoWidth = 0U;
1862 uint32_t videoHeight = 0U;
1863 size_t rawStreamCnt = 0;
1864 size_t stallStreamCnt = 0;
1865 size_t processedStreamCnt = 0;
1866 // Number of streams on ISP encoder path
1867 size_t numStreamsOnEncoder = 0;
1868 size_t numYuv888OnEncoder = 0;
1869 bool bYuv888OverrideJpeg = false;
1870 cam_dimension_t largeYuv888Size = {0, 0};
1871 cam_dimension_t maxViewfinderSize = {0, 0};
1872 bool bJpegExceeds4K = false;
1873 bool bJpegOnEncoder = false;
1874 bool bUseCommonFeatureMask = false;
1875 cam_feature_mask_t commonFeatureMask = 0;
1876 bool bSmallJpegSize = false;
1877 uint32_t width_ratio;
1878 uint32_t height_ratio;
1879 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1880 camera3_stream_t *inputStream = NULL;
1881 bool isJpeg = false;
1882 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001883 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001884 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001885
1886 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1887
1888 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 uint8_t eis_prop_set;
1890 uint32_t maxEisWidth = 0;
1891 uint32_t maxEisHeight = 0;
1892
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001893 // Initialize all instant AEC related variables
1894 mInstantAEC = false;
1895 mResetInstantAEC = false;
1896 mInstantAECSettledFrameNumber = 0;
1897 mAecSkipDisplayFrameBound = 0;
1898 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001899 mCurrFeatureState = 0;
1900 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001901
Thierry Strudel3d639192016-09-09 11:52:26 -07001902 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1903
1904 size_t count = IS_TYPE_MAX;
1905 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1906 for (size_t i = 0; i < count; i++) {
1907 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001908 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1909 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 break;
1911 }
1912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001913
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001914 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 maxEisWidth = MAX_EIS_WIDTH;
1916 maxEisHeight = MAX_EIS_HEIGHT;
1917 }
1918
1919 /* EIS setprop control */
1920 char eis_prop[PROPERTY_VALUE_MAX];
1921 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001922 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001923 eis_prop_set = (uint8_t)atoi(eis_prop);
1924
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001925 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
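    // Illustrative note (assuming adb property access): since the default above
    // is "1", EIS can be disabled for debugging with
    // `adb shell setprop persist.camera.eis.enable 0`.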
1927
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001928 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1929 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001930
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 /* stream configurations */
1932 for (size_t i = 0; i < streamList->num_streams; i++) {
1933 camera3_stream_t *newStream = streamList->streams[i];
1934 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1935 "height = %d, rotation = %d, usage = 0x%x",
1936 i, newStream->stream_type, newStream->format,
1937 newStream->width, newStream->height, newStream->rotation,
1938 newStream->usage);
1939 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1940 newStream->stream_type == CAMERA3_STREAM_INPUT){
1941 isZsl = true;
1942 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001943 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1944 IS_USAGE_PREVIEW(newStream->usage)) {
1945 isPreview = true;
1946 }
1947
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1949 inputStream = newStream;
1950 }
1951
Emilian Peev7650c122017-01-19 08:24:33 -08001952 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1953 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 isJpeg = true;
1955 jpegSize.width = newStream->width;
1956 jpegSize.height = newStream->height;
1957 if (newStream->width > VIDEO_4K_WIDTH ||
1958 newStream->height > VIDEO_4K_HEIGHT)
1959 bJpegExceeds4K = true;
1960 }
1961
1962 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1963 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1964 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001965 // In HAL3 we can have multiple different video streams.
1966 // The variables video width and height are used below as
1967 // dimensions of the biggest of them
1968 if (videoWidth < newStream->width ||
1969 videoHeight < newStream->height) {
1970 videoWidth = newStream->width;
1971 videoHeight = newStream->height;
1972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1974 (VIDEO_4K_HEIGHT <= newStream->height)) {
1975 m_bIs4KVideo = true;
1976 }
1977 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1978 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001979
Thierry Strudel3d639192016-09-09 11:52:26 -07001980 }
1981 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1982 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1983 switch (newStream->format) {
1984 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001985 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1986 depthPresent = true;
1987 break;
1988 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001989 stallStreamCnt++;
1990 if (isOnEncoder(maxViewfinderSize, newStream->width,
1991 newStream->height)) {
1992 numStreamsOnEncoder++;
1993 bJpegOnEncoder = true;
1994 }
1995 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1996 newStream->width);
1997 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1998                        newStream->height);
1999 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2000                        "FATAL: max_downscale_factor cannot be zero");
2001 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2002 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2003 LOGH("Setting small jpeg size flag to true");
2004 bSmallJpegSize = true;
2005 }
2006 break;
2007 case HAL_PIXEL_FORMAT_RAW10:
2008 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2009 case HAL_PIXEL_FORMAT_RAW16:
2010 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002011 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2012 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2013 pdStatCount++;
2014 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002015 break;
2016 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2017 processedStreamCnt++;
2018 if (isOnEncoder(maxViewfinderSize, newStream->width,
2019 newStream->height)) {
2020 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2021 !IS_USAGE_ZSL(newStream->usage)) {
2022 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2023 }
2024 numStreamsOnEncoder++;
2025 }
2026 break;
2027 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2028 processedStreamCnt++;
2029 if (isOnEncoder(maxViewfinderSize, newStream->width,
2030 newStream->height)) {
2031                    // If the Yuv888 size is not greater than 4K, set the feature mask
2032                    // to SUPERSET so that it supports concurrent requests on
2033                    // YUV and JPEG.
2034 if (newStream->width <= VIDEO_4K_WIDTH &&
2035 newStream->height <= VIDEO_4K_HEIGHT) {
2036 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2037 }
2038 numStreamsOnEncoder++;
2039 numYuv888OnEncoder++;
2040 largeYuv888Size.width = newStream->width;
2041 largeYuv888Size.height = newStream->height;
2042 }
2043 break;
2044 default:
2045 processedStreamCnt++;
2046 if (isOnEncoder(maxViewfinderSize, newStream->width,
2047 newStream->height)) {
2048 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2049 numStreamsOnEncoder++;
2050 }
2051 break;
2052 }
2053
2054 }
2055 }
2056
2057 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2058 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2059 !m_bIsVideo) {
2060 m_bEisEnable = false;
2061 }
2062
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002063 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2064 pthread_mutex_unlock(&mMutex);
2065 return -EINVAL;
2066 }
2067
Thierry Strudel54dc9782017-02-15 12:12:10 -08002068 uint8_t forceEnableTnr = 0;
2069 char tnr_prop[PROPERTY_VALUE_MAX];
2070 memset(tnr_prop, 0, sizeof(tnr_prop));
2071 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2072 forceEnableTnr = (uint8_t)atoi(tnr_prop);
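    // Illustrative note (assuming adb property access):
    // `adb shell setprop debug.camera.tnr.forceenable 1` forces TNR on
    // regardless of the preview/video TNR checks below.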
2073
Thierry Strudel3d639192016-09-09 11:52:26 -07002074 /* Logic to enable/disable TNR based on specific config size/etc.*/
2075 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002076 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2077 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002078 else if (forceEnableTnr)
2079 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002080
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002081 char videoHdrProp[PROPERTY_VALUE_MAX];
2082 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2083 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2084 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2085
2086 if (hdr_mode_prop == 1 && m_bIsVideo &&
2087 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2088 m_bVideoHdrEnabled = true;
2089 else
2090 m_bVideoHdrEnabled = false;
2091
2092
Thierry Strudel3d639192016-09-09 11:52:26 -07002093 /* Check if num_streams is sane */
2094 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2095 rawStreamCnt > MAX_RAW_STREAMS ||
2096 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2097        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2098 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002103 if (isZsl && m_bIs4KVideo) {
2104 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002105 pthread_mutex_unlock(&mMutex);
2106 return -EINVAL;
2107 }
2108 /* Check if stream sizes are sane */
2109 if (numStreamsOnEncoder > 2) {
2110 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2111 pthread_mutex_unlock(&mMutex);
2112 return -EINVAL;
2113 } else if (1 < numStreamsOnEncoder){
2114 bUseCommonFeatureMask = true;
2115 LOGH("Multiple streams above max viewfinder size, common mask needed");
2116 }
2117
2118 /* Check if BLOB size is greater than 4k in 4k recording case */
2119 if (m_bIs4KVideo && bJpegExceeds4K) {
2120 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2121 pthread_mutex_unlock(&mMutex);
2122 return -EINVAL;
2123 }
2124
Emilian Peev7650c122017-01-19 08:24:33 -08002125 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2126 depthPresent) {
2127 LOGE("HAL doesn't support depth streams in HFR mode!");
2128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131
Thierry Strudel3d639192016-09-09 11:52:26 -07002132 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2133 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2134 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2135 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2136 // configurations:
2137 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2138 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2139 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2140 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2141 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2142 __func__);
2143 pthread_mutex_unlock(&mMutex);
2144 return -EINVAL;
2145 }
2146
2147 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2148    // the YUV stream's size is strictly greater than the JPEG size, set common
2149 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2150 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2151 jpegSize.width, jpegSize.height) &&
2152 largeYuv888Size.width > jpegSize.width &&
2153 largeYuv888Size.height > jpegSize.height) {
2154 bYuv888OverrideJpeg = true;
2155 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2156 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2157 }
2158
2159 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2160 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2161 commonFeatureMask);
2162 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2163 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2164
2165 rc = validateStreamDimensions(streamList);
2166 if (rc == NO_ERROR) {
2167 rc = validateStreamRotations(streamList);
2168 }
2169 if (rc != NO_ERROR) {
2170 LOGE("Invalid stream configuration requested!");
2171 pthread_mutex_unlock(&mMutex);
2172 return rc;
2173 }
2174
Emilian Peev0f3c3162017-03-15 12:57:46 +00002175 if (1 < pdStatCount) {
2176 LOGE("HAL doesn't support multiple PD streams");
2177 pthread_mutex_unlock(&mMutex);
2178 return -EINVAL;
2179 }
2180
2181 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2182 (1 == pdStatCount)) {
2183 LOGE("HAL doesn't support PD streams in HFR mode!");
2184 pthread_mutex_unlock(&mMutex);
2185 return -EINVAL;
2186 }
2187
Thierry Strudel3d639192016-09-09 11:52:26 -07002188 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2189 for (size_t i = 0; i < streamList->num_streams; i++) {
2190 camera3_stream_t *newStream = streamList->streams[i];
2191 LOGH("newStream type = %d, stream format = %d "
2192 "stream size : %d x %d, stream rotation = %d",
2193 newStream->stream_type, newStream->format,
2194 newStream->width, newStream->height, newStream->rotation);
2195        //if the stream is in mStreamInfo, validate it
2196 bool stream_exists = false;
2197 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2198 it != mStreamInfo.end(); it++) {
2199 if ((*it)->stream == newStream) {
2200 QCamera3ProcessingChannel *channel =
2201 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2202 stream_exists = true;
2203 if (channel)
2204 delete channel;
2205 (*it)->status = VALID;
2206 (*it)->stream->priv = NULL;
2207 (*it)->channel = NULL;
2208 }
2209 }
2210 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2211 //new stream
2212 stream_info_t* stream_info;
2213 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2214 if (!stream_info) {
2215 LOGE("Could not allocate stream info");
2216 rc = -ENOMEM;
2217 pthread_mutex_unlock(&mMutex);
2218 return rc;
2219 }
2220 stream_info->stream = newStream;
2221 stream_info->status = VALID;
2222 stream_info->channel = NULL;
2223 mStreamInfo.push_back(stream_info);
2224 }
2225 /* Covers Opaque ZSL and API1 F/W ZSL */
2226 if (IS_USAGE_ZSL(newStream->usage)
2227 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2228 if (zslStream != NULL) {
2229 LOGE("Multiple input/reprocess streams requested!");
2230 pthread_mutex_unlock(&mMutex);
2231 return BAD_VALUE;
2232 }
2233 zslStream = newStream;
2234 }
2235 /* Covers YUV reprocess */
2236 if (inputStream != NULL) {
2237 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2238 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2239 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2240 && inputStream->width == newStream->width
2241 && inputStream->height == newStream->height) {
2242 if (zslStream != NULL) {
2243                /* This scenario indicates that multiple YUV streams with the same
2244                 * size as the input stream have been requested. Since the zsl stream
2245                 * handle is solely used for overriding the size of streams which
2246                 * share h/w streams, we just make a guess here as to which of the
2247                 * streams is the ZSL stream. This will be refactored once we have
2248                 * generic logic for streams sharing encoder output.
2249                 */
2250 LOGH("Warning, Multiple ip/reprocess streams requested!");
2251 }
2252 zslStream = newStream;
2253 }
2254 }
2255 }
2256
2257 /* If a zsl stream is set, we know that we have configured at least one input or
2258 bidirectional stream */
2259 if (NULL != zslStream) {
2260 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2261 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2262 mInputStreamInfo.format = zslStream->format;
2263 mInputStreamInfo.usage = zslStream->usage;
2264 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2265 mInputStreamInfo.dim.width,
2266 mInputStreamInfo.dim.height,
2267 mInputStreamInfo.format, mInputStreamInfo.usage);
2268 }
2269
2270 cleanAndSortStreamInfo();
2271 if (mMetadataChannel) {
2272 delete mMetadataChannel;
2273 mMetadataChannel = NULL;
2274 }
2275 if (mSupportChannel) {
2276 delete mSupportChannel;
2277 mSupportChannel = NULL;
2278 }
2279
2280 if (mAnalysisChannel) {
2281 delete mAnalysisChannel;
2282 mAnalysisChannel = NULL;
2283 }
2284
2285 if (mDummyBatchChannel) {
2286 delete mDummyBatchChannel;
2287 mDummyBatchChannel = NULL;
2288 }
2289
Emilian Peev7650c122017-01-19 08:24:33 -08002290 if (mDepthChannel) {
2291 mDepthChannel = NULL;
2292 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002293 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002294
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002295 mShutterDispatcher.clear();
2296 mOutputBufferDispatcher.clear();
2297
Thierry Strudel2896d122017-02-23 19:18:03 -08002298 char is_type_value[PROPERTY_VALUE_MAX];
2299 property_get("persist.camera.is_type", is_type_value, "4");
2300 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2301
Binhao Line406f062017-05-03 14:39:44 -07002302 char property_value[PROPERTY_VALUE_MAX];
2303 property_get("persist.camera.gzoom.at", property_value, "0");
2304 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002305 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2306 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2307 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2308 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002309
2310 property_get("persist.camera.gzoom.4k", property_value, "0");
2311 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
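    // Illustrative note: persist.camera.gzoom.at is treated as a bit mask above,
    // bit 0 enabling Google zoom on the video stream and bit 1 on the preview
    // stream (back camera only), so e.g. `adb shell setprop
    // persist.camera.gzoom.at 3` would enable both; persist.camera.gzoom.4k
    // additionally gates its use for 4K video.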
2312
Thierry Strudel3d639192016-09-09 11:52:26 -07002313 //Create metadata channel and initialize it
2314 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2315 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2316 gCamCapability[mCameraId]->color_arrangement);
2317 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2318 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002319 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 if (mMetadataChannel == NULL) {
2321 LOGE("failed to allocate metadata channel");
2322 rc = -ENOMEM;
2323 pthread_mutex_unlock(&mMutex);
2324 return rc;
2325 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002326 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002327 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2328 if (rc < 0) {
2329 LOGE("metadata channel initialization failed");
2330 delete mMetadataChannel;
2331 mMetadataChannel = NULL;
2332 pthread_mutex_unlock(&mMutex);
2333 return rc;
2334 }
2335
Thierry Strudel2896d122017-02-23 19:18:03 -08002336 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002338 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002339 // Keep track of preview/video streams indices.
2340 // There could be more than one preview streams, but only one video stream.
2341 int32_t video_stream_idx = -1;
2342 int32_t preview_stream_idx[streamList->num_streams];
2343 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002344 bool previewTnr[streamList->num_streams];
2345 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2346 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2347 // Loop through once to determine preview TNR conditions before creating channels.
2348 for (size_t i = 0; i < streamList->num_streams; i++) {
2349 camera3_stream_t *newStream = streamList->streams[i];
2350 uint32_t stream_usage = newStream->usage;
2351 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2352 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2353 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2354 video_stream_idx = (int32_t)i;
2355 else
2356 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2357 }
2358 }
2359 // By default, preview stream TNR is disabled.
2360    // Enable TNR for the preview stream if all conditions below are satisfied:
2361 // 1. preview resolution == video resolution.
2362 // 2. video stream TNR is enabled.
2363    // 3. EIS2.0, OR front camera (which wouldn't use EIS3 even if it's set)
2364 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2365 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2366 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2367 if (m_bTnrEnabled && m_bTnrVideo &&
2368 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2369 video_stream->width == preview_stream->width &&
2370 video_stream->height == preview_stream->height) {
2371 previewTnr[preview_stream_idx[i]] = true;
2372 }
2373 }
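    // Illustrative example (hypothetical sizes): with a 1920x1080 preview plus a
    // 1920x1080 video stream, video TNR enabled, and either the front camera or
    // persist.camera.is_type set to EIS 2.0, the matching preview stream gets
    // previewTnr[] = true; a 1280x720 preview alongside 1920x1080 video would
    // not, since the resolutions differ.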
2374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2376 /* Allocate channel objects for the requested streams */
2377 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002378
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 camera3_stream_t *newStream = streamList->streams[i];
2380 uint32_t stream_usage = newStream->usage;
2381 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2383 struct camera_info *p_info = NULL;
2384 pthread_mutex_lock(&gCamLock);
2385 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2386 pthread_mutex_unlock(&gCamLock);
2387 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2388 || IS_USAGE_ZSL(newStream->usage)) &&
2389 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2393 if (bUseCommonFeatureMask)
2394 zsl_ppmask = commonFeatureMask;
2395 else
2396 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002398 if (numStreamsOnEncoder > 0)
2399 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 else
2401 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002403 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002405 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002406 LOGH("Input stream configured, reprocess config");
2407 } else {
2408 //for non zsl streams find out the format
2409 switch (newStream->format) {
2410 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2411 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002412 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002413 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2414 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2415 /* add additional features to pp feature mask */
2416 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2417 mStreamConfigInfo.num_streams);
2418
2419 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2420 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2421 CAM_STREAM_TYPE_VIDEO;
2422 if (m_bTnrEnabled && m_bTnrVideo) {
2423 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2424 CAM_QCOM_FEATURE_CPP_TNR;
2425 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2427 ~CAM_QCOM_FEATURE_CDS;
2428 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2431 CAM_QTI_FEATURE_PPEISCORE;
2432 }
Binhao Line406f062017-05-03 14:39:44 -07002433 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2434 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2435 CAM_QCOM_FEATURE_GOOG_ZOOM;
2436 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 } else {
2438 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2439 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002440 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2442 CAM_QCOM_FEATURE_CPP_TNR;
2443 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2445 ~CAM_QCOM_FEATURE_CDS;
2446 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002447 if(!m_bSwTnrPreview) {
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2449 ~CAM_QTI_FEATURE_SW_TNR;
2450 }
Binhao Line406f062017-05-03 14:39:44 -07002451 if (is_goog_zoom_preview_enabled) {
2452 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2453 CAM_QCOM_FEATURE_GOOG_ZOOM;
2454 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 padding_info.width_padding = mSurfaceStridePadding;
2456 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002457 previewSize.width = (int32_t)newStream->width;
2458 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002459 }
2460 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2461 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2462 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2463 newStream->height;
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2465 newStream->width;
2466 }
2467 }
2468 break;
2469 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002470 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2472 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2473 if (bUseCommonFeatureMask)
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 commonFeatureMask;
2476 else
2477 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2478 CAM_QCOM_FEATURE_NONE;
2479 } else {
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2481 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2482 }
2483 break;
2484 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002485 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002486 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2487 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2488 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2489 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2490 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002491 /* Remove rotation if it is not supported
2492 for 4K LiveVideo snapshot case (online processing) */
2493 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2494 CAM_QCOM_FEATURE_ROTATION)) {
2495 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2496 &= ~CAM_QCOM_FEATURE_ROTATION;
2497 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 } else {
2499 if (bUseCommonFeatureMask &&
2500 isOnEncoder(maxViewfinderSize, newStream->width,
2501 newStream->height)) {
2502 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2505 }
2506 }
2507 if (isZsl) {
2508 if (zslStream) {
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2510 (int32_t)zslStream->width;
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2512 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002515 } else {
2516 LOGE("Error, No ZSL stream identified");
2517 pthread_mutex_unlock(&mMutex);
2518 return -EINVAL;
2519 }
2520 } else if (m_bIs4KVideo) {
2521 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2522 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2523 } else if (bYuv888OverrideJpeg) {
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2525 (int32_t)largeYuv888Size.width;
2526 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2527 (int32_t)largeYuv888Size.height;
2528 }
2529 break;
2530 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2531 case HAL_PIXEL_FORMAT_RAW16:
2532 case HAL_PIXEL_FORMAT_RAW10:
2533 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2534 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2535 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002536 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2537 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2538 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2540 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2542 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2543 gCamCapability[mCameraId]->dt[mPDIndex];
2544 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2545 gCamCapability[mCameraId]->vc[mPDIndex];
2546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002547 break;
2548 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002549 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2551 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2552 break;
2553 }
2554 }
2555
2556 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2557 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2558 gCamCapability[mCameraId]->color_arrangement);
2559
2560 if (newStream->priv == NULL) {
2561 //New stream, construct channel
2562 switch (newStream->stream_type) {
2563 case CAMERA3_STREAM_INPUT:
2564 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2565 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2566 break;
2567 case CAMERA3_STREAM_BIDIRECTIONAL:
2568 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2569 GRALLOC_USAGE_HW_CAMERA_WRITE;
2570 break;
2571 case CAMERA3_STREAM_OUTPUT:
2572                /* For the video encoding stream, set the read/write rarely
2573                 * flags so that the buffers may be allocated un-cached */
2574 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2575 newStream->usage |=
2576 (GRALLOC_USAGE_SW_READ_RARELY |
2577 GRALLOC_USAGE_SW_WRITE_RARELY |
2578 GRALLOC_USAGE_HW_CAMERA_WRITE);
2579 else if (IS_USAGE_ZSL(newStream->usage))
2580 {
2581 LOGD("ZSL usage flag skipping");
2582 }
2583 else if (newStream == zslStream
2584 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2585 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2586 } else
2587 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2588 break;
2589 default:
2590 LOGE("Invalid stream_type %d", newStream->stream_type);
2591 break;
2592 }
2593
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002594 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002595 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2596 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2597 QCamera3ProcessingChannel *channel = NULL;
2598 switch (newStream->format) {
2599 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2600 if ((newStream->usage &
2601 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2602 (streamList->operation_mode ==
2603 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2604 ) {
2605 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2606 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002607 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002608 this,
2609 newStream,
2610 (cam_stream_type_t)
2611 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2612 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2613 mMetadataChannel,
2614 0); //heap buffers are not required for HFR video channel
2615 if (channel == NULL) {
2616 LOGE("allocation of channel failed");
2617 pthread_mutex_unlock(&mMutex);
2618 return -ENOMEM;
2619 }
2620 //channel->getNumBuffers() will return 0 here so use
2621 //MAX_INFLIGHT_HFR_REQUESTS
2622 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2623 newStream->priv = channel;
2624 LOGI("num video buffers in HFR mode: %d",
2625 MAX_INFLIGHT_HFR_REQUESTS);
2626 } else {
2627 /* Copy stream contents in HFR preview only case to create
2628 * dummy batch channel so that sensor streaming is in
2629 * HFR mode */
2630 if (!m_bIsVideo && (streamList->operation_mode ==
2631 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2632 mDummyBatchStream = *newStream;
2633 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002634 int bufferCount = MAX_INFLIGHT_REQUESTS;
2635 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2636 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002637 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2638 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2639 bufferCount = m_bIs4KVideo ?
2640 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2641 }
2642
Thierry Strudel2896d122017-02-23 19:18:03 -08002643 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002644 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2645 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002646 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002647 this,
2648 newStream,
2649 (cam_stream_type_t)
2650 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2651 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2652 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002653 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 if (channel == NULL) {
2655 LOGE("allocation of channel failed");
2656 pthread_mutex_unlock(&mMutex);
2657 return -ENOMEM;
2658 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002659 /* disable UBWC for preview, though supported,
2660 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002661 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002662 (previewSize.width == (int32_t)videoWidth) &&
2663 (previewSize.height == (int32_t)videoHeight)) {
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002665 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002666 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002667 /* When goog_zoom is linked to the preview or video stream,
2668 * disable UBWC for the linked stream */
2669 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2670 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2671 channel->setUBWCEnabled(false);
2672 }
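/* Illustrative summary of the UBWC decision above (a sketch of how the flags
 * interact, using hypothetical stream sizes, not additional HAL logic): with a
 * 1920x1080 preview paired with a 1920x1080 video stream and video UBWC disabled,
 * forcePreviewUBWC ends up false so preview can reuse the CPP output; and whenever
 * CAM_QCOM_FEATURE_GOOG_ZOOM is set in the postprocess mask, UBWC is switched off
 * for the linked stream regardless of that decision. */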
Thierry Strudel3d639192016-09-09 11:52:26 -07002673 newStream->max_buffers = channel->getNumBuffers();
2674 newStream->priv = channel;
2675 }
2676 break;
2677 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2678 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2679 mChannelHandle,
2680 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002681 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002682 this,
2683 newStream,
2684 (cam_stream_type_t)
2685 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2686 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2687 mMetadataChannel);
2688 if (channel == NULL) {
2689 LOGE("allocation of YUV channel failed");
2690 pthread_mutex_unlock(&mMutex);
2691 return -ENOMEM;
2692 }
2693 newStream->max_buffers = channel->getNumBuffers();
2694 newStream->priv = channel;
2695 break;
2696 }
2697 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2698 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002699 case HAL_PIXEL_FORMAT_RAW10: {
2700 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2701 (HAL_DATASPACE_DEPTH != newStream->data_space))
2702 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002703 mRawChannel = new QCamera3RawChannel(
2704 mCameraHandle->camera_handle, mChannelHandle,
2705 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002706 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 this, newStream,
2708 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002709 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002710 if (mRawChannel == NULL) {
2711 LOGE("allocation of raw channel failed");
2712 pthread_mutex_unlock(&mMutex);
2713 return -ENOMEM;
2714 }
2715 newStream->max_buffers = mRawChannel->getNumBuffers();
2716 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2717 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002718 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002719 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002720 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2721 mDepthChannel = new QCamera3DepthChannel(
2722 mCameraHandle->camera_handle, mChannelHandle,
2723 mCameraHandle->ops, NULL, NULL, &padding_info,
2724 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2725 mMetadataChannel);
2726 if (NULL == mDepthChannel) {
2727 LOGE("Allocation of depth channel failed");
2728 pthread_mutex_unlock(&mMutex);
2729 return NO_MEMORY;
2730 }
2731 newStream->priv = mDepthChannel;
2732 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2733 } else {
2734 // Max live snapshot inflight buffer is 1. This is to mitigate
2735 // frame drop issues for video snapshot. The more buffers being
2736 // allocated, the more frame drops there are.
2737 mPictureChannel = new QCamera3PicChannel(
2738 mCameraHandle->camera_handle, mChannelHandle,
2739 mCameraHandle->ops, captureResultCb,
2740 setBufferErrorStatus, &padding_info, this, newStream,
2741 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2742 m_bIs4KVideo, isZsl, mMetadataChannel,
2743 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2744 if (mPictureChannel == NULL) {
2745 LOGE("allocation of channel failed");
2746 pthread_mutex_unlock(&mMutex);
2747 return -ENOMEM;
2748 }
2749 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2750 newStream->max_buffers = mPictureChannel->getNumBuffers();
2751 mPictureChannel->overrideYuvSize(
2752 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2753 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002755 break;
2756
2757 default:
2758 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002759 pthread_mutex_unlock(&mMutex);
2760 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002761 }
2762 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2763 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2764 } else {
2765 LOGE("Error, Unknown stream type");
2766 pthread_mutex_unlock(&mMutex);
2767 return -EINVAL;
2768 }
2769
2770 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002771 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002772 // Here we only care whether it's EIS3 or not
2773 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2774 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2775 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2776 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002777 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002778 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002779 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002780 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2781 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2782 }
2783 }
2784
2785 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2786 it != mStreamInfo.end(); it++) {
2787 if ((*it)->stream == newStream) {
2788 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2789 break;
2790 }
2791 }
2792 } else {
2793 // Channel already exists for this stream
2794 // Do nothing for now
2795 }
2796 padding_info = gCamCapability[mCameraId]->padding_info;
2797
Emilian Peev7650c122017-01-19 08:24:33 -08002798 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002799 * since there is no real stream associated with them
2800 */
Emilian Peev7650c122017-01-19 08:24:33 -08002801 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002802 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2803 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002805 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002806 }
2807
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002808 // Let buffer dispatcher know the configured streams.
2809 mOutputBufferDispatcher.configureStreams(streamList);
2810
Thierry Strudel2896d122017-02-23 19:18:03 -08002811 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2812 onlyRaw = false;
2813 }
2814
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002815 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002816 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002817 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 cam_analysis_info_t analysisInfo;
2819 int32_t ret = NO_ERROR;
2820 ret = mCommon.getAnalysisInfo(
2821 FALSE,
2822 analysisFeatureMask,
2823 &analysisInfo);
2824 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002825 cam_color_filter_arrangement_t analysis_color_arrangement =
2826 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2827 CAM_FILTER_ARRANGEMENT_Y :
2828 gCamCapability[mCameraId]->color_arrangement);
2829 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2830 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002831 cam_dimension_t analysisDim;
2832 analysisDim = mCommon.getMatchingDimension(previewSize,
2833 analysisInfo.analysis_recommended_res);
2834
2835 mAnalysisChannel = new QCamera3SupportChannel(
2836 mCameraHandle->camera_handle,
2837 mChannelHandle,
2838 mCameraHandle->ops,
2839 &analysisInfo.analysis_padding_info,
2840 analysisFeatureMask,
2841 CAM_STREAM_TYPE_ANALYSIS,
2842 &analysisDim,
2843 (analysisInfo.analysis_format
2844 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2845 : CAM_FORMAT_YUV_420_NV21),
2846 analysisInfo.hw_analysis_supported,
2847 gCamCapability[mCameraId]->color_arrangement,
2848 this,
2849 0); // force buffer count to 0
2850 } else {
2851 LOGW("getAnalysisInfo failed, ret = %d", ret);
2852 }
2853 if (!mAnalysisChannel) {
2854 LOGW("Analysis channel cannot be created");
2855 }
2856 }
2857
Thierry Strudel3d639192016-09-09 11:52:26 -07002858 //RAW DUMP channel
2859 if (mEnableRawDump && isRawStreamRequested == false){
2860 cam_dimension_t rawDumpSize;
2861 rawDumpSize = getMaxRawSize(mCameraId);
2862 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2863 setPAAFSupport(rawDumpFeatureMask,
2864 CAM_STREAM_TYPE_RAW,
2865 gCamCapability[mCameraId]->color_arrangement);
2866 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2867 mChannelHandle,
2868 mCameraHandle->ops,
2869 rawDumpSize,
2870 &padding_info,
2871 this, rawDumpFeatureMask);
2872 if (!mRawDumpChannel) {
2873 LOGE("Raw Dump channel cannot be created");
2874 pthread_mutex_unlock(&mMutex);
2875 return -ENOMEM;
2876 }
2877 }
2878
Thierry Strudel3d639192016-09-09 11:52:26 -07002879 if (mAnalysisChannel) {
2880 cam_analysis_info_t analysisInfo;
2881 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2882 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2883 CAM_STREAM_TYPE_ANALYSIS;
2884 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2885 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002886 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002887 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2888 &analysisInfo);
2889 if (rc != NO_ERROR) {
2890 LOGE("getAnalysisInfo failed, ret = %d", rc);
2891 pthread_mutex_unlock(&mMutex);
2892 return rc;
2893 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002894 cam_color_filter_arrangement_t analysis_color_arrangement =
2895 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2896 CAM_FILTER_ARRANGEMENT_Y :
2897 gCamCapability[mCameraId]->color_arrangement);
2898 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2899 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2900 analysis_color_arrangement);
2901
Thierry Strudel3d639192016-09-09 11:52:26 -07002902 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002903 mCommon.getMatchingDimension(previewSize,
2904 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 mStreamConfigInfo.num_streams++;
2906 }
2907
Thierry Strudel2896d122017-02-23 19:18:03 -08002908 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002909 cam_analysis_info_t supportInfo;
2910 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2911 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2912 setPAAFSupport(callbackFeatureMask,
2913 CAM_STREAM_TYPE_CALLBACK,
2914 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002915 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002916 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002917 if (ret != NO_ERROR) {
2918 /* Ignore the error for Mono camera
2919 * because the PAAF bit mask is only set
2920 * for CAM_STREAM_TYPE_ANALYSIS stream type
2921 */
2922 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2923 LOGW("getAnalysisInfo failed, ret = %d", ret);
2924 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002925 }
2926 mSupportChannel = new QCamera3SupportChannel(
2927 mCameraHandle->camera_handle,
2928 mChannelHandle,
2929 mCameraHandle->ops,
2930 &gCamCapability[mCameraId]->padding_info,
2931 callbackFeatureMask,
2932 CAM_STREAM_TYPE_CALLBACK,
2933 &QCamera3SupportChannel::kDim,
2934 CAM_FORMAT_YUV_420_NV21,
2935 supportInfo.hw_analysis_supported,
2936 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002937 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002938 if (!mSupportChannel) {
2939 LOGE("dummy channel cannot be created");
2940 pthread_mutex_unlock(&mMutex);
2941 return -ENOMEM;
2942 }
2943 }
2944
2945 if (mSupportChannel) {
2946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2947 QCamera3SupportChannel::kDim;
2948 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2949 CAM_STREAM_TYPE_CALLBACK;
2950 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2951 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2952 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2954 gCamCapability[mCameraId]->color_arrangement);
2955 mStreamConfigInfo.num_streams++;
2956 }
2957
2958 if (mRawDumpChannel) {
2959 cam_dimension_t rawSize;
2960 rawSize = getMaxRawSize(mCameraId);
2961 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2962 rawSize;
2963 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2964 CAM_STREAM_TYPE_RAW;
2965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2966 CAM_QCOM_FEATURE_NONE;
2967 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2968 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2969 gCamCapability[mCameraId]->color_arrangement);
2970 mStreamConfigInfo.num_streams++;
2971 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002972
2973 if (mHdrPlusRawSrcChannel) {
2974 cam_dimension_t rawSize;
2975 rawSize = getMaxRawSize(mCameraId);
2976 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2978 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2979 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2980 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2981 gCamCapability[mCameraId]->color_arrangement);
2982 mStreamConfigInfo.num_streams++;
2983 }
2984
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 /* In HFR mode, if video stream is not added, create a dummy channel so that
2986 * ISP can operate in batch mode even for the preview-only case. This channel is
2987 * never 'start'ed (no stream-on), it is only 'initialized' */
2988 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2989 !m_bIsVideo) {
2990 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2991 setPAAFSupport(dummyFeatureMask,
2992 CAM_STREAM_TYPE_VIDEO,
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2995 mChannelHandle,
2996 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002997 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002998 this,
2999 &mDummyBatchStream,
3000 CAM_STREAM_TYPE_VIDEO,
3001 dummyFeatureMask,
3002 mMetadataChannel);
3003 if (NULL == mDummyBatchChannel) {
3004 LOGE("creation of mDummyBatchChannel failed. "
3005 "Preview will use non-HFR sensor mode");
3006 }
3007 }
3008 if (mDummyBatchChannel) {
3009 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3010 mDummyBatchStream.width;
3011 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3012 mDummyBatchStream.height;
3013 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3014 CAM_STREAM_TYPE_VIDEO;
3015 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3016 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3017 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3018 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3019 gCamCapability[mCameraId]->color_arrangement);
3020 mStreamConfigInfo.num_streams++;
3021 }
3022
3023 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3024 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003025 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003026 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003027
3028 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3029 for (pendingRequestIterator i = mPendingRequestsList.begin();
3030 i != mPendingRequestsList.end();) {
3031 i = erasePendingRequest(i);
3032 }
3033 mPendingFrameDropList.clear();
3034 // Initialize/Reset the pending buffers list
3035 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3036 req.mPendingBufferList.clear();
3037 }
3038 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003039 mExpectedInflightDuration = 0;
3040 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003041
Thierry Strudel3d639192016-09-09 11:52:26 -07003042 mCurJpegMeta.clear();
3043 //Get min frame duration for this stream configuration
3044 deriveMinFrameDuration();
3045
Chien-Yu Chenee335912017-02-09 17:53:20 -08003046 mFirstPreviewIntentSeen = false;
3047
3048 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003049 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003050 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3051 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003052 disableHdrPlusModeLocked();
3053 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003054
Thierry Strudel3d639192016-09-09 11:52:26 -07003055 // Update state
3056 mState = CONFIGURED;
3057
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003058 mFirstMetadataCallback = true;
3059
Thierry Strudel3d639192016-09-09 11:52:26 -07003060 pthread_mutex_unlock(&mMutex);
3061
3062 return rc;
3063}
3064
3065/*===========================================================================
3066 * FUNCTION : validateCaptureRequest
3067 *
3068 * DESCRIPTION: validate a capture request from camera service
3069 *
3070 * PARAMETERS :
3071 * @request : request from framework to process
3072 *
3073 * RETURN :
3074 *
3075 *==========================================================================*/
3076int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003077 camera3_capture_request_t *request,
3078 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003079{
3080 ssize_t idx = 0;
3081 const camera3_stream_buffer_t *b;
3082 CameraMetadata meta;
3083
3084 /* Sanity check the request */
3085 if (request == NULL) {
3086 LOGE("NULL capture request");
3087 return BAD_VALUE;
3088 }
3089
3090 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3091 /*settings cannot be null for the first request*/
3092 return BAD_VALUE;
3093 }
3094
3095 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003096 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3097 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003098 LOGE("%s: Request %d: No output buffers provided!",
3099 __FUNCTION__, frameNumber);
3100 return BAD_VALUE;
3101 }
3102 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3103 LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
3104 request->num_output_buffers, MAX_NUM_STREAMS);
3105 return BAD_VALUE;
3106 }
3107 if (request->input_buffer != NULL) {
3108 b = request->input_buffer;
3109 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3110 LOGE("Request %d: Buffer %ld: Status not OK!",
3111 frameNumber, (long)idx);
3112 return BAD_VALUE;
3113 }
3114 if (b->release_fence != -1) {
3115 LOGE("Request %d: Buffer %ld: Has a release fence!",
3116 frameNumber, (long)idx);
3117 return BAD_VALUE;
3118 }
3119 if (b->buffer == NULL) {
3120 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3121 frameNumber, (long)idx);
3122 return BAD_VALUE;
3123 }
3124 }
3125
3126 // Validate all buffers
3127 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003128 if (b == NULL) {
3129 return BAD_VALUE;
3130 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003131 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 QCamera3ProcessingChannel *channel =
3133 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3134 if (channel == NULL) {
3135 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3136 frameNumber, (long)idx);
3137 return BAD_VALUE;
3138 }
3139 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3140 LOGE("Request %d: Buffer %ld: Status not OK!",
3141 frameNumber, (long)idx);
3142 return BAD_VALUE;
3143 }
3144 if (b->release_fence != -1) {
3145 LOGE("Request %d: Buffer %ld: Has a release fence!",
3146 frameNumber, (long)idx);
3147 return BAD_VALUE;
3148 }
3149 if (b->buffer == NULL) {
3150 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3151 frameNumber, (long)idx);
3152 return BAD_VALUE;
3153 }
3154 if (*(b->buffer) == NULL) {
3155 LOGE("Request %d: Buffer %ld: NULL private handle!",
3156 frameNumber, (long)idx);
3157 return BAD_VALUE;
3158 }
3159 idx++;
3160 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003162 return NO_ERROR;
3163}
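/* A minimal illustrative call pattern for the validator above (a hedged sketch,
 * not a quote of the actual caller; the empty internal-request list is a
 * hypothetical placeholder):
 *
 *     List<InternalRequest> internalStreams;   // no internally requested streams
 *     int rc = validateCaptureRequest(request, internalStreams);
 *     if (rc != NO_ERROR) {
 *         return rc;   // reject malformed requests before touching any channel
 *     }
 */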
3164
3165/*===========================================================================
3166 * FUNCTION : deriveMinFrameDuration
3167 *
3168 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3169 * on currently configured streams.
3170 *
3171 * PARAMETERS : NONE
3172 *
3173 * RETURN : NONE
3174 *
3175 *==========================================================================*/
3176void QCamera3HardwareInterface::deriveMinFrameDuration()
3177{
3178 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003179 bool hasRaw = false;
3180
3181 mMinRawFrameDuration = 0;
3182 mMinJpegFrameDuration = 0;
3183 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003184
3185 maxJpegDim = 0;
3186 maxProcessedDim = 0;
3187 maxRawDim = 0;
3188
3189 // Figure out maximum jpeg, processed, and raw dimensions
3190 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3191 it != mStreamInfo.end(); it++) {
3192
3193 // Input stream doesn't have valid stream_type
3194 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3195 continue;
3196
3197 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3198 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3199 if (dimension > maxJpegDim)
3200 maxJpegDim = dimension;
3201 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3202 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3203 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003204 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003205 if (dimension > maxRawDim)
3206 maxRawDim = dimension;
3207 } else {
3208 if (dimension > maxProcessedDim)
3209 maxProcessedDim = dimension;
3210 }
3211 }
3212
3213 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3214 MAX_SIZES_CNT);
3215
3216 //Assume all jpeg dimensions are in processed dimensions.
3217 if (maxJpegDim > maxProcessedDim)
3218 maxProcessedDim = maxJpegDim;
3219 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003220 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003221 maxRawDim = INT32_MAX;
3222
3223 for (size_t i = 0; i < count; i++) {
3224 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3225 gCamCapability[mCameraId]->raw_dim[i].height;
3226 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3227 maxRawDim = dimension;
3228 }
3229 }
3230
3231 //Find minimum durations for processed, jpeg, and raw
3232 for (size_t i = 0; i < count; i++) {
3233 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3234 gCamCapability[mCameraId]->raw_dim[i].height) {
3235 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3236 break;
3237 }
3238 }
3239 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3240 for (size_t i = 0; i < count; i++) {
3241 if (maxProcessedDim ==
3242 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3243 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3244 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3245 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3246 break;
3247 }
3248 }
3249}
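/* Worked example for the derivation above (hypothetical stream set, assuming the
 * sizes appear in the capability tables): a 1920x1080 preview plus a 4000x3000 JPEG
 * and no RAW stream gives maxProcessedDim = 4000 * 3000 = 12000000 (JPEG dimensions
 * are folded into processed dimensions), maxRawDim stays 0 because hasRaw is false,
 * so mMinProcessedFrameDuration and mMinJpegFrameDuration are taken from the
 * picture_min_duration entry matching 4000x3000 while mMinRawFrameDuration remains 0.
 */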
3250
3251/*===========================================================================
3252 * FUNCTION : getMinFrameDuration
3253 *
3254 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3255 *              frame durations and the current request configuration.
3256 *
3257 * PARAMETERS : @request: request sent by the frameworks
3258 *
3259 * RETURN     : min frame duration for a particular request
3260 *
3261 *==========================================================================*/
3262int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3263{
3264 bool hasJpegStream = false;
3265 bool hasRawStream = false;
3266 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3267 const camera3_stream_t *stream = request->output_buffers[i].stream;
3268 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3269 hasJpegStream = true;
3270 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3271 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3272 stream->format == HAL_PIXEL_FORMAT_RAW16)
3273 hasRawStream = true;
3274 }
3275
3276 if (!hasJpegStream)
3277 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3278 else
3279 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3280}
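/* Illustrative evaluation (hypothetical durations): with mMinProcessedFrameDuration =
 * 33333333 ns (~30 fps), mMinJpegFrameDuration = 50000000 ns (~20 fps) and
 * mMinRawFrameDuration = 33333333 ns, a request that includes a BLOB buffer returns
 * MAX(MAX(33333333, 33333333), 50000000) = 50000000 ns, while a request without a
 * JPEG stream returns MAX(33333333, 33333333) = 33333333 ns. */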
3281
3282/*===========================================================================
3283 * FUNCTION : handleBuffersDuringFlushLock
3284 *
3285 * DESCRIPTION: Account for buffers returned from back-end during flush
3286 * This function is executed while mMutex is held by the caller.
3287 *
3288 * PARAMETERS :
3289 * @buffer: image buffer for the callback
3290 *
3291 * RETURN :
3292 *==========================================================================*/
3293void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3294{
3295 bool buffer_found = false;
3296 for (List<PendingBuffersInRequest>::iterator req =
3297 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3298 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3299 for (List<PendingBufferInfo>::iterator i =
3300 req->mPendingBufferList.begin();
3301 i != req->mPendingBufferList.end(); i++) {
3302 if (i->buffer == buffer->buffer) {
3303 mPendingBuffersMap.numPendingBufsAtFlush--;
3304 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3305 buffer->buffer, req->frame_number,
3306 mPendingBuffersMap.numPendingBufsAtFlush);
3307 buffer_found = true;
3308 break;
3309 }
3310 }
3311 if (buffer_found) {
3312 break;
3313 }
3314 }
3315 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3316 //signal the flush()
3317 LOGD("All buffers returned to HAL. Continue flush");
3318 pthread_cond_signal(&mBuffersCond);
3319 }
3320}
3321
Thierry Strudel3d639192016-09-09 11:52:26 -07003322/*===========================================================================
3323 * FUNCTION : handleBatchMetadata
3324 *
3325 * DESCRIPTION: Handles metadata buffer callback in batch mode
3326 *
3327 * PARAMETERS : @metadata_buf: metadata buffer
3328 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3329 * the meta buf in this method
3330 *
3331 * RETURN :
3332 *
3333 *==========================================================================*/
3334void QCamera3HardwareInterface::handleBatchMetadata(
3335 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3336{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003337 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003338
3339 if (NULL == metadata_buf) {
3340 LOGE("metadata_buf is NULL");
3341 return;
3342 }
3343 /* In batch mode, the metadata will contain the frame number and timestamp of
3344 * the last frame in the batch. Eg: a batch containing buffers from request
3345 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3346 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3347 * multiple process_capture_results */
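    /* An illustrative walk-through of the interpolation below (the numbers are
     * hypothetical): if the batch covered requests 5..8, the metadata carries
     * last_frame_number = 8 and the pending batch map yields first_frame_number = 5,
     * so frameNumDiff = 8 + 1 - 5 = 4 and loopCount = 4. Iteration i then reports
     * frame_number = 5 + i and, with mHFRVideoFps assumed to be 120, an inferred
     * timestamp of first_frame_capture_time + i * NSEC_PER_SEC / 120, where
     * first_frame_capture_time = last_frame_capture_time - 3 * NSEC_PER_SEC / 120. */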
3348 metadata_buffer_t *metadata =
3349 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3350 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3351 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3352 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3353 uint32_t frame_number = 0, urgent_frame_number = 0;
3354 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3355 bool invalid_metadata = false;
3356 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3357 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003358 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003359
3360 int32_t *p_frame_number_valid =
3361 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3362 uint32_t *p_frame_number =
3363 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3364 int64_t *p_capture_time =
3365 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3366 int32_t *p_urgent_frame_number_valid =
3367 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3368 uint32_t *p_urgent_frame_number =
3369 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3370
3371 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3372 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3373 (NULL == p_urgent_frame_number)) {
3374 LOGE("Invalid metadata");
3375 invalid_metadata = true;
3376 } else {
3377 frame_number_valid = *p_frame_number_valid;
3378 last_frame_number = *p_frame_number;
3379 last_frame_capture_time = *p_capture_time;
3380 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3381 last_urgent_frame_number = *p_urgent_frame_number;
3382 }
3383
3384 /* In batch mode, when no video buffers are requested, set_parms are sent
3385 * for every capture_request. The difference between consecutive urgent
3386 * frame numbers and frame numbers should be used to interpolate the
3387 * corresponding frame numbers and time stamps */
3388 pthread_mutex_lock(&mMutex);
3389 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003390 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3391 if(idx < 0) {
3392 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3393 last_urgent_frame_number);
3394 mState = ERROR;
3395 pthread_mutex_unlock(&mMutex);
3396 return;
3397 }
3398 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3400 first_urgent_frame_number;
3401
3402 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3403 urgent_frame_number_valid,
3404 first_urgent_frame_number, last_urgent_frame_number);
3405 }
3406
3407 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003408 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3409 if(idx < 0) {
3410 LOGE("Invalid frame number received: %d. Irrecoverable error",
3411 last_frame_number);
3412 mState = ERROR;
3413 pthread_mutex_unlock(&mMutex);
3414 return;
3415 }
3416 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003417 frameNumDiff = last_frame_number + 1 -
3418 first_frame_number;
3419 mPendingBatchMap.removeItem(last_frame_number);
3420
3421 LOGD("frm: valid: %d frm_num: %d - %d",
3422 frame_number_valid,
3423 first_frame_number, last_frame_number);
3424
3425 }
3426 pthread_mutex_unlock(&mMutex);
3427
3428 if (urgent_frame_number_valid || frame_number_valid) {
3429 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3430 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3431 LOGE("urgentFrameNumDiff: %zu urgentFrameNum: %u",
3432 urgentFrameNumDiff, last_urgent_frame_number);
3433 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3434 LOGE("frameNumDiff: %zu frameNum: %u",
3435 frameNumDiff, last_frame_number);
3436 }
3437
3438 for (size_t i = 0; i < loopCount; i++) {
3439 /* handleMetadataWithLock is called even for invalid_metadata for
3440 * pipeline depth calculation */
3441 if (!invalid_metadata) {
3442 /* Infer frame number. Batch metadata contains frame number of the
3443 * last frame */
3444 if (urgent_frame_number_valid) {
3445 if (i < urgentFrameNumDiff) {
3446 urgent_frame_number =
3447 first_urgent_frame_number + i;
3448 LOGD("inferred urgent frame_number: %d",
3449 urgent_frame_number);
3450 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3451 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3452 } else {
3453 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3454 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3455 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3456 }
3457 }
3458
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (frame_number_valid) {
3462 if (i < frameNumDiff) {
3463 frame_number = first_frame_number + i;
3464 LOGD("inferred frame_number: %d", frame_number);
3465 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3466 CAM_INTF_META_FRAME_NUMBER, frame_number);
3467 } else {
3468 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3469 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3470 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3471 }
3472 }
3473
3474 if (last_frame_capture_time) {
3475 //Infer timestamp
3476 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003477 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003479 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003480 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3481 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3482 LOGD("batch capture_time: %lld, capture_time: %lld",
3483 last_frame_capture_time, capture_time);
3484 }
3485 }
3486 pthread_mutex_lock(&mMutex);
3487 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003488 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003489 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3490 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003491 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003492 pthread_mutex_unlock(&mMutex);
3493 }
3494
3495 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003496 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 mMetadataChannel->bufDone(metadata_buf);
3498 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003499 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 }
3501}
3502
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003503void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3504 camera3_error_msg_code_t errorCode)
3505{
3506 camera3_notify_msg_t notify_msg;
3507 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3508 notify_msg.type = CAMERA3_MSG_ERROR;
3509 notify_msg.message.error.error_code = errorCode;
3510 notify_msg.message.error.error_stream = NULL;
3511 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003512 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003513
3514 return;
3515}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003516
3517/*===========================================================================
3518 * FUNCTION : sendPartialMetadataWithLock
3519 *
3520 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3521 *
3522 * PARAMETERS : @metadata: metadata buffer
3523 * @requestIter: The iterator for the pending capture request for
3524 * which the partial result is being sent
3525 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3526 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003527 * @isJumpstartMetadata: Whether this is a partial metadata for
3528 * jumpstart, i.e. even though it doesn't map to a valid partial
3529 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003530 *
3531 * RETURN :
3532 *
3533 *==========================================================================*/
3534
3535void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3536 metadata_buffer_t *metadata,
3537 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003538 bool lastUrgentMetadataInBatch,
3539 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003540{
3541 camera3_capture_result_t result;
3542 memset(&result, 0, sizeof(camera3_capture_result_t));
3543
3544 requestIter->partial_result_cnt++;
3545
3546 // Extract 3A metadata
3547 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003548 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3549 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003550 // Populate metadata result
3551 result.frame_number = requestIter->frame_number;
3552 result.num_output_buffers = 0;
3553 result.output_buffers = NULL;
3554 result.partial_result = requestIter->partial_result_cnt;
3555
3556 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003557 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003558 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3559 // Notify HDR+ client about the partial metadata.
3560 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3561 result.partial_result == PARTIAL_RESULT_COUNT);
3562 }
3563 }
3564
3565 orchestrateResult(&result);
3566 LOGD("urgent frame_number = %u", result.frame_number);
3567 free_camera_metadata((camera_metadata_t *)result.result);
3568}
3569
Thierry Strudel3d639192016-09-09 11:52:26 -07003570/*===========================================================================
3571 * FUNCTION : handleMetadataWithLock
3572 *
3573 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3574 *
3575 * PARAMETERS : @metadata_buf: metadata buffer
3576 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3577 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003578 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3579 * last urgent metadata in a batch. Always true for non-batch mode
3580 * @lastMetadataInBatch: Boolean to indicate whether this is the
3581 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003582 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3583 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 *
3585 * RETURN :
3586 *
3587 *==========================================================================*/
3588void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003589 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003590 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3591 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003592{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003593 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003594 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3595 //during flush do not send metadata from this thread
3596 LOGD("not sending metadata during flush or when mState is error");
3597 if (free_and_bufdone_meta_buf) {
3598 mMetadataChannel->bufDone(metadata_buf);
3599 free(metadata_buf);
3600 }
3601 return;
3602 }
3603
3604 //not in flush
3605 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3606 int32_t frame_number_valid, urgent_frame_number_valid;
3607 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003608 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003609 nsecs_t currentSysTime;
3610
3611 int32_t *p_frame_number_valid =
3612 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3613 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3614 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003615 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003616 int32_t *p_urgent_frame_number_valid =
3617 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3618 uint32_t *p_urgent_frame_number =
3619 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3620 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3621 metadata) {
3622 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3623 *p_frame_number_valid, *p_frame_number);
3624 }
3625
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003626 camera_metadata_t *resultMetadata = nullptr;
3627
Thierry Strudel3d639192016-09-09 11:52:26 -07003628 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3629 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3630 LOGE("Invalid metadata");
3631 if (free_and_bufdone_meta_buf) {
3632 mMetadataChannel->bufDone(metadata_buf);
3633 free(metadata_buf);
3634 }
3635 goto done_metadata;
3636 }
3637 frame_number_valid = *p_frame_number_valid;
3638 frame_number = *p_frame_number;
3639 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003640 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003641 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3642 urgent_frame_number = *p_urgent_frame_number;
3643 currentSysTime = systemTime(CLOCK_MONOTONIC);
3644
Jason Lee603176d2017-05-31 11:43:27 -07003645 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3646 const int tries = 3;
3647 nsecs_t bestGap, measured;
3648 for (int i = 0; i < tries; ++i) {
3649 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3650 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3651 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3652 const nsecs_t gap = tmono2 - tmono;
3653 if (i == 0 || gap < bestGap) {
3654 bestGap = gap;
3655 measured = tbase - ((tmono + tmono2) >> 1);
3656 }
3657 }
3658 capture_time -= measured;
3659 }
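    /* Sketch of the bracketing trick above (explanatory only): each attempt samples
     * MONOTONIC, then BOOTTIME, then MONOTONIC again; the BOOTTIME read is assumed to
     * land near the midpoint of the two MONOTONIC reads, so
     * measured = tbase - (tmono + tmono2) / 2 estimates the offset between the two
     * clocks, and the attempt with the smallest tmono2 - tmono gap (least scheduling
     * noise) is kept. Subtracting that offset rebases the uncalibrated sensor
     * timestamp onto the clock base the framework expects. */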
3660
Thierry Strudel3d639192016-09-09 11:52:26 -07003661 // Detect if buffers from any requests are overdue
3662 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 int64_t timeout;
3664 {
3665 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3666 // If there is a pending HDR+ request, the following requests may be blocked until the
3667 // HDR+ request is done. So allow a longer timeout.
3668 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3669 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003670 if (timeout < mExpectedInflightDuration) {
3671 timeout = mExpectedInflightDuration;
3672 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 }
3674
3675 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003676 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003677 assert(missed.stream->priv);
3678 if (missed.stream->priv) {
3679 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3680 assert(ch->mStreams[0]);
3681 if (ch->mStreams[0]) {
3682 LOGE("Cancel missing frame = %d, buffer = %p,"
3683 "stream type = %d, stream format = %d",
3684 req.frame_number, missed.buffer,
3685 ch->mStreams[0]->getMyType(), missed.stream->format);
3686 ch->timeoutFrame(req.frame_number);
3687 }
3688 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 }
3690 }
3691 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003692 //For the very first metadata callback, regardless of whether it contains a valid
3693 //frame number, send the partial metadata for the jumpstarting requests.
3694 //Note that this has to be done even if the metadata doesn't contain valid
3695 //urgent frame number, because in the case only 1 request is ever submitted
3696 //to HAL, there won't be subsequent valid urgent frame number.
3697 if (mFirstMetadataCallback) {
3698 for (pendingRequestIterator i =
3699 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3700 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003701 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3702 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003703 }
3704 }
3705 mFirstMetadataCallback = false;
3706 }
3707
Thierry Strudel3d639192016-09-09 11:52:26 -07003708 //Partial result on process_capture_result for timestamp
3709 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003710 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003711
3712 //Received an urgent Frame Number, handle it
3713 //using partial results
3714 for (pendingRequestIterator i =
3715 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3716 LOGD("Iterator Frame = %d urgent frame = %d",
3717 i->frame_number, urgent_frame_number);
3718
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003719 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003720 (i->partial_result_cnt == 0)) {
3721 LOGE("Error: HAL missed urgent metadata for frame number %d",
3722 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003723 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003724 }
3725
3726 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003728 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3729 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003730 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3731 // Instant AEC settled for this frame.
3732 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3733 mInstantAECSettledFrameNumber = urgent_frame_number;
3734 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003735 break;
3736 }
3737 }
3738 }
3739
3740 if (!frame_number_valid) {
3741 LOGD("Not a valid normal frame number, used as SOF only");
3742 if (free_and_bufdone_meta_buf) {
3743 mMetadataChannel->bufDone(metadata_buf);
3744 free(metadata_buf);
3745 }
3746 goto done_metadata;
3747 }
3748 LOGH("valid frame_number = %u, capture_time = %lld",
3749 frame_number, capture_time);
3750
Emilian Peev4e0fe952017-06-30 12:40:09 -07003751 handleDepthDataLocked(metadata->depth_data, frame_number,
3752 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003753
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 // Check whether any stream buffer corresponding to this is dropped or not
3755 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3756 // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
3757 for (auto & pendingRequest : mPendingRequestsList) {
3758 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3759 mInstantAECSettledFrameNumber)) {
3760 camera3_notify_msg_t notify_msg = {};
3761 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003762 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003763 QCamera3ProcessingChannel *channel =
3764 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003765 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003766 if (p_cam_frame_drop) {
3767 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003768 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 // Got the stream ID for drop frame.
3770 dropFrame = true;
3771 break;
3772 }
3773 }
3774 } else {
3775 // This is instant AEC case.
3776 // For instant AEC, drop the stream until AEC is settled.
3777 dropFrame = true;
3778 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003779
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 if (dropFrame) {
3781 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3782 if (p_cam_frame_drop) {
3783 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003784 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003785 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 } else {
3787 // For instant AEC, inform frame drop and frame number
3788 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3789 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003790 pendingRequest.frame_number, streamID,
3791 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003792 }
3793 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003795 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003797 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 if (p_cam_frame_drop) {
3799 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003800 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003802 } else {
3803 // For instant AEC, inform frame drop and frame number
3804 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3805 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003806 pendingRequest.frame_number, streamID,
3807 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003808 }
3809 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003811 PendingFrameDrop.stream_ID = streamID;
3812 // Add the Frame drop info to mPendingFrameDropList
3813 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003815 }
3816 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 for (auto & pendingRequest : mPendingRequestsList) {
3820 // Find the pending request with the frame number.
3821 if (pendingRequest.frame_number == frame_number) {
3822 // Update the sensor timestamp.
3823 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003824
Thierry Strudel3d639192016-09-09 11:52:26 -07003825
            /* Set the timestamp in display metadata so that clients aware of
               private_handle, such as VT, can use these unmodified timestamps.
               The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003829 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003830
Thierry Strudel3d639192016-09-09 11:52:26 -07003831 // Find channel requiring metadata, meaning internal offline postprocess
3832 // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time
            // (because we are not making copies, and the metadata buffer is not reference counted).
3835 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3837 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003838 if (iter->need_metadata) {
3839 internalPproc = true;
3840 QCamera3ProcessingChannel *channel =
3841 (QCamera3ProcessingChannel *)iter->stream->priv;
3842 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003843 if(p_is_metabuf_queued != NULL) {
3844 *p_is_metabuf_queued = true;
3845 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003846 break;
3847 }
3848 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003849 for (auto itr = pendingRequest.internalRequestList.begin();
3850 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003851 if (itr->need_metadata) {
3852 internalPproc = true;
3853 QCamera3ProcessingChannel *channel =
3854 (QCamera3ProcessingChannel *)itr->stream->priv;
3855 channel->queueReprocMetadata(metadata_buf);
3856 break;
3857 }
3858 }
3859
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003861
3862 bool *enableZsl = nullptr;
3863 if (gExposeEnableZslKey) {
3864 enableZsl = &pendingRequest.enableZsl;
3865 }
3866
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003867 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003868 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003869 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003870
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003871 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003872
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 if (pendingRequest.blob_request) {
3874 //Dump tuning metadata if enabled and available
3875 char prop[PROPERTY_VALUE_MAX];
3876 memset(prop, 0, sizeof(prop));
3877 property_get("persist.camera.dumpmetadata", prop, "0");
3878 int32_t enabled = atoi(prop);
3879 if (enabled && metadata->is_tuning_params_valid) {
3880 dumpMetadataToFile(metadata->tuning_params,
3881 mMetaFrameCount,
3882 enabled,
3883 "Snapshot",
3884 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003885 }
3886 }
3887
3888 if (!internalPproc) {
3889 LOGD("couldn't find need_metadata for this metadata");
3890 // Return metadata buffer
3891 if (free_and_bufdone_meta_buf) {
3892 mMetadataChannel->bufDone(metadata_buf);
3893 free(metadata_buf);
3894 }
3895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003896
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003897 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003898 }
3899 }
3900
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003901 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3902
3903 // Try to send out capture result metadata.
3904 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003905 return;
3906
Thierry Strudel3d639192016-09-09 11:52:26 -07003907done_metadata:
3908 for (pendingRequestIterator i = mPendingRequestsList.begin();
3909 i != mPendingRequestsList.end() ;i++) {
3910 i->pipeline_depth++;
3911 }
3912 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3913 unblockRequestIfNecessary();
3914}
3915
3916/*===========================================================================
 * FUNCTION   : handleDepthDataLocked
3918 *
3919 * DESCRIPTION: Handles incoming depth data
3920 *
3921 * PARAMETERS : @depthData : Depth data
3922 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003923 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003924 *
3925 * RETURN :
3926 *
3927 *==========================================================================*/
3928void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003929 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003930 uint32_t currentFrameNumber;
3931 buffer_handle_t *depthBuffer;
3932
3933 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003934 return;
3935 }
3936
3937 camera3_stream_buffer_t resultBuffer =
3938 {.acquire_fence = -1,
3939 .release_fence = -1,
3940 .status = CAMERA3_BUFFER_STATUS_OK,
3941 .buffer = nullptr,
3942 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003943 do {
3944 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3945 if (nullptr == depthBuffer) {
3946 break;
3947 }
3948
Emilian Peev7650c122017-01-19 08:24:33 -08003949 resultBuffer.buffer = depthBuffer;
3950 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003951 if (valid) {
3952 int32_t rc = mDepthChannel->populateDepthData(depthData,
3953 frameNumber);
3954 if (NO_ERROR != rc) {
3955 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3956 } else {
3957 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3958 }
Emilian Peev7650c122017-01-19 08:24:33 -08003959 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003960 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003961 }
3962 } else if (currentFrameNumber > frameNumber) {
3963 break;
3964 } else {
3965 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3966 {{currentFrameNumber, mDepthChannel->getStream(),
3967 CAMERA3_MSG_ERROR_BUFFER}}};
3968 orchestrateNotify(&notify_msg);
3969
3970 LOGE("Depth buffer for frame number: %d is missing "
3971 "returning back!", currentFrameNumber);
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 }
3974 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003975 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } while (currentFrameNumber < frameNumber);
3977}
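
/*
 * Illustrative summary of the drain loop in handleDepthDataLocked() above
 * (descriptive only, not executed): depth buffers are walked oldest-first and
 * each one falls into exactly one of three cases:
 *   - currentFrameNumber == frameNumber: populate the depth data, or mark the
 *     buffer CAMERA3_BUFFER_STATUS_ERROR if the incoming data is not valid,
 *     then return it.
 *   - currentFrameNumber >  frameNumber: stop; that buffer belongs to a later
 *     request.
 *   - currentFrameNumber <  frameNumber: the depth data for that older frame
 *     never arrived, so CAMERA3_MSG_ERROR_BUFFER is notified and the buffer
 *     is returned with an error status.
 */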
3978
3979/*===========================================================================
3980 * FUNCTION : notifyErrorFoPendingDepthData
3981 *
3982 * DESCRIPTION: Returns error for any pending depth buffers
3983 *
3984 * PARAMETERS : depthCh - depth channel that needs to get flushed
3985 *
3986 * RETURN :
3987 *
3988 *==========================================================================*/
3989void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3990 QCamera3DepthChannel *depthCh) {
3991 uint32_t currentFrameNumber;
3992 buffer_handle_t *depthBuffer;
3993
3994 if (nullptr == depthCh) {
3995 return;
3996 }
3997
3998 camera3_notify_msg_t notify_msg =
3999 {.type = CAMERA3_MSG_ERROR,
4000 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4001 camera3_stream_buffer_t resultBuffer =
4002 {.acquire_fence = -1,
4003 .release_fence = -1,
4004 .buffer = nullptr,
4005 .stream = depthCh->getStream(),
4006 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004007
4008 while (nullptr !=
4009 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4010 depthCh->unmapBuffer(currentFrameNumber);
4011
4012 notify_msg.message.error.frame_number = currentFrameNumber;
4013 orchestrateNotify(&notify_msg);
4014
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004015 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004016 };
4017}
4018
4019/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004020 * FUNCTION : hdrPlusPerfLock
4021 *
4022 * DESCRIPTION: perf lock for HDR+ using custom intent
4023 *
4024 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4025 *
4026 * RETURN : None
4027 *
4028 *==========================================================================*/
4029void QCamera3HardwareInterface::hdrPlusPerfLock(
4030 mm_camera_super_buf_t *metadata_buf)
4031{
4032 if (NULL == metadata_buf) {
4033 LOGE("metadata_buf is NULL");
4034 return;
4035 }
4036 metadata_buffer_t *metadata =
4037 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4038 int32_t *p_frame_number_valid =
4039 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4040 uint32_t *p_frame_number =
4041 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4042
4043 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4044 LOGE("%s: Invalid metadata", __func__);
4045 return;
4046 }
4047
Wei Wang01385482017-08-03 10:49:34 -07004048 //acquire perf lock for 2 secs after the last HDR frame is captured
4049 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004050 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4051 if ((p_frame_number != NULL) &&
4052 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004053 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004054 }
4055 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004056}
4057
4058/*===========================================================================
4059 * FUNCTION : handleInputBufferWithLock
4060 *
4061 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4062 *
4063 * PARAMETERS : @frame_number: frame number of the input buffer
4064 *
4065 * RETURN :
4066 *
4067 *==========================================================================*/
4068void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4069{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004070 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 pendingRequestIterator i = mPendingRequestsList.begin();
4072 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4073 i++;
4074 }
4075 if (i != mPendingRequestsList.end() && i->input_buffer) {
4076 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004077 CameraMetadata settings;
4078 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4079 if(i->settings) {
4080 settings = i->settings;
4081 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4082 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004084 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004085 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004086 } else {
4087 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 }
4089
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004090 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4091 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4092 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004093
4094 camera3_capture_result result;
4095 memset(&result, 0, sizeof(camera3_capture_result));
4096 result.frame_number = frame_number;
4097 result.result = i->settings;
4098 result.input_buffer = i->input_buffer;
4099 result.partial_result = PARTIAL_RESULT_COUNT;
4100
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004101 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 LOGD("Input request metadata and input buffer frame_number = %u",
4103 i->frame_number);
4104 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004105
4106 // Dispatch result metadata that may be just unblocked by this reprocess result.
4107 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 } else {
4109 LOGE("Could not find input request for frame number %d", frame_number);
4110 }
4111}
4112
4113/*===========================================================================
4114 * FUNCTION : handleBufferWithLock
4115 *
4116 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4117 *
4118 * PARAMETERS : @buffer: image buffer for the callback
4119 * @frame_number: frame number of the image buffer
4120 *
4121 * RETURN :
4122 *
4123 *==========================================================================*/
4124void QCamera3HardwareInterface::handleBufferWithLock(
4125 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4126{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004127 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004128
4129 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4130 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4131 }
4132
Thierry Strudel3d639192016-09-09 11:52:26 -07004133 /* Nothing to be done during error state */
4134 if ((ERROR == mState) || (DEINIT == mState)) {
4135 return;
4136 }
4137 if (mFlushPerf) {
4138 handleBuffersDuringFlushLock(buffer);
4139 return;
4140 }
4141 //not in flush
4142 // If the frame number doesn't exist in the pending request list,
4143 // directly send the buffer to the frameworks, and update pending buffers map
4144 // Otherwise, book-keep the buffer.
4145 pendingRequestIterator i = mPendingRequestsList.begin();
4146 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4147 i++;
4148 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004149
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004150 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004151 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004152 // For a reprocessing request, try to send out result metadata.
4153 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004154 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004155 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004156
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004157 // Check if this frame was dropped.
4158 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4159 m != mPendingFrameDropList.end(); m++) {
4160 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4161 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4162 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4163 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4164 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4165 frame_number, streamID);
4166 m = mPendingFrameDropList.erase(m);
4167 break;
4168 }
4169 }
4170
4171 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4172 LOGH("result frame_number = %d, buffer = %p",
4173 frame_number, buffer->buffer);
4174
4175 mPendingBuffersMap.removeBuf(buffer->buffer);
4176 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4177
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004178 if (mPreviewStarted == false) {
4179 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4180 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004181 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4182
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004183 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4184 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4185 mPreviewStarted = true;
4186
4187 // Set power hint for preview
4188 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4189 }
4190 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004191}
4192
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004193void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004194 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004195{
4196 // Find the pending request for this result metadata.
4197 auto requestIter = mPendingRequestsList.begin();
4198 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4199 requestIter++;
4200 }
4201
4202 if (requestIter == mPendingRequestsList.end()) {
4203 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4204 return;
4205 }
4206
4207 // Update the result metadata
4208 requestIter->resultMetadata = resultMetadata;
4209
4210 // Check what type of request this is.
4211 bool liveRequest = false;
4212 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004213 // HDR+ request doesn't have partial results.
4214 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004215 } else if (requestIter->input_buffer != nullptr) {
4216 // Reprocessing request result is the same as settings.
4217 requestIter->resultMetadata = requestIter->settings;
4218 // Reprocessing request doesn't have partial results.
4219 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4220 } else {
4221 liveRequest = true;
4222 requestIter->partial_result_cnt++;
4223 mPendingLiveRequest--;
4224
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004225 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004226 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004227 // For a live request, send the metadata to HDR+ client.
4228 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4229 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4230 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4231 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004232 }
4233 }
4234
    // Remove the lens shading map if it was not requested.
4236 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4237 CameraMetadata metadata;
4238 metadata.acquire(resultMetadata);
4239 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4240 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4241 &requestIter->requestedLensShadingMapMode, 1);
4242
4243 requestIter->resultMetadata = metadata.release();
4244 }
4245
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004246 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4247}
4248
4249void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4250 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004251 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4252 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004253 bool readyToSend = true;
4254
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004255 // Iterate through the pending requests to send out result metadata that are ready. Also if
4256 // this result metadata belongs to a live request, notify errors for previous live requests
4257 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004258 auto iter = mPendingRequestsList.begin();
4259 while (iter != mPendingRequestsList.end()) {
4260 // Check if current pending request is ready. If it's not ready, the following pending
4261 // requests are also not ready.
4262 if (readyToSend && iter->resultMetadata == nullptr) {
4263 readyToSend = false;
4264 }
4265
4266 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4267
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 camera3_capture_result_t result = {};
4269 result.frame_number = iter->frame_number;
4270 result.result = iter->resultMetadata;
4271 result.partial_result = iter->partial_result_cnt;
4272
4273 // If this pending buffer has result metadata, we may be able to send out shutter callback
4274 // and result metadata.
4275 if (iter->resultMetadata != nullptr) {
4276 if (!readyToSend) {
4277 // If any of the previous pending request is not ready, this pending request is
4278 // also not ready to send in order to keep shutter callbacks and result metadata
4279 // in order.
4280 iter++;
4281 continue;
4282 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004283 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004284 // If the result metadata belongs to a live request, notify errors for previous pending
4285 // live requests.
4286 mPendingLiveRequest--;
4287
4288 CameraMetadata dummyMetadata;
4289 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4290 result.result = dummyMetadata.release();
4291
4292 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004293
4294 // partial_result should be PARTIAL_RESULT_CNT in case of
4295 // ERROR_RESULT.
4296 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4297 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004298 } else {
4299 iter++;
4300 continue;
4301 }
4302
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004303 result.output_buffers = nullptr;
4304 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004305 orchestrateResult(&result);
4306
4307 // For reprocessing, result metadata is the same as settings so do not free it here to
4308 // avoid double free.
4309 if (result.result != iter->settings) {
4310 free_camera_metadata((camera_metadata_t *)result.result);
4311 }
4312 iter->resultMetadata = nullptr;
4313 iter = erasePendingRequest(iter);
4314 }
4315
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004316 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004317 for (auto &iter : mPendingRequestsList) {
4318 // Increment pipeline depth for the following pending requests.
4319 if (iter.frame_number > frameNumber) {
4320 iter.pipeline_depth++;
4321 }
4322 }
4323 }
4324
4325 unblockRequestIfNecessary();
4326}
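
/*
 * Illustrative walk-through of the dispatch rule above (hypothetical frame
 * numbers, descriptive only):
 *
 *   Pending live requests 10, 11 and 12 have no result metadata yet. When the
 *   metadata for frame 10 arrives, dispatchResultMetadataWithLock(10, true)
 *   sends only frame 10; frames 11 and 12 stay pending and get their
 *   pipeline_depth bumped. If instead the metadata for frame 11 had arrived
 *   while frame 10 still had none, frame 10 would be completed with a
 *   CAMERA3_MSG_ERROR_RESULT notification and a dummy result carrying only
 *   its ANDROID_REQUEST_ID, since a newer live result implies the older
 *   metadata was dropped.
 */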
4327
Thierry Strudel3d639192016-09-09 11:52:26 -07004328/*===========================================================================
4329 * FUNCTION : unblockRequestIfNecessary
4330 *
4331 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4332 * that mMutex is held when this function is called.
4333 *
4334 * PARAMETERS :
4335 *
4336 * RETURN :
4337 *
4338 *==========================================================================*/
4339void QCamera3HardwareInterface::unblockRequestIfNecessary()
4340{
4341 // Unblock process_capture_request
4342 pthread_cond_signal(&mRequestCond);
4343}
4344
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004345/*===========================================================================
4346 * FUNCTION : isHdrSnapshotRequest
4347 *
4348 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4349 *
4350 * PARAMETERS : camera3 request structure
4351 *
4352 * RETURN : boolean decision variable
4353 *
4354 *==========================================================================*/
4355bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4356{
4357 if (request == NULL) {
4358 LOGE("Invalid request handle");
4359 assert(0);
4360 return false;
4361 }
4362
4363 if (!mForceHdrSnapshot) {
4364 CameraMetadata frame_settings;
4365 frame_settings = request->settings;
4366
4367 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4368 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4369 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4370 return false;
4371 }
4372 } else {
4373 return false;
4374 }
4375
4376 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4377 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4378 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4379 return false;
4380 }
4381 } else {
4382 return false;
4383 }
4384 }
4385
4386 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4387 if (request->output_buffers[i].stream->format
4388 == HAL_PIXEL_FORMAT_BLOB) {
4389 return true;
4390 }
4391 }
4392
4393 return false;
4394}
4395/*===========================================================================
4396 * FUNCTION : orchestrateRequest
4397 *
4398 * DESCRIPTION: Orchestrates a capture request from camera service
4399 *
4400 * PARAMETERS :
4401 * @request : request from framework to process
4402 *
4403 * RETURN : Error status codes
4404 *
4405 *==========================================================================*/
4406int32_t QCamera3HardwareInterface::orchestrateRequest(
4407 camera3_capture_request_t *request)
4408{
4409
4410 uint32_t originalFrameNumber = request->frame_number;
4411 uint32_t originalOutputCount = request->num_output_buffers;
4412 const camera_metadata_t *original_settings = request->settings;
4413 List<InternalRequest> internallyRequestedStreams;
4414 List<InternalRequest> emptyInternalList;
4415
4416 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4417 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4418 uint32_t internalFrameNumber;
4419 CameraMetadata modified_meta;
4420
4421
4422 /* Add Blob channel to list of internally requested streams */
4423 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4424 if (request->output_buffers[i].stream->format
4425 == HAL_PIXEL_FORMAT_BLOB) {
4426 InternalRequest streamRequested;
4427 streamRequested.meteringOnly = 1;
4428 streamRequested.need_metadata = 0;
4429 streamRequested.stream = request->output_buffers[i].stream;
4430 internallyRequestedStreams.push_back(streamRequested);
4431 }
4432 }
4433 request->num_output_buffers = 0;
4434 auto itr = internallyRequestedStreams.begin();
4435
4436 /* Modify setting to set compensation */
4437 modified_meta = request->settings;
4438 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4439 uint8_t aeLock = 1;
4440 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4441 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4442 camera_metadata_t *modified_settings = modified_meta.release();
4443 request->settings = modified_settings;
4444
4445 /* Capture Settling & -2x frame */
4446 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4447 request->frame_number = internalFrameNumber;
4448 processCaptureRequest(request, internallyRequestedStreams);
4449
4450 request->num_output_buffers = originalOutputCount;
4451 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4452 request->frame_number = internalFrameNumber;
4453 processCaptureRequest(request, emptyInternalList);
4454 request->num_output_buffers = 0;
4455
4456 modified_meta = modified_settings;
4457 expCompensation = 0;
4458 aeLock = 1;
4459 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4460 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4461 modified_settings = modified_meta.release();
4462 request->settings = modified_settings;
4463
4464 /* Capture Settling & 0X frame */
4465
4466 itr = internallyRequestedStreams.begin();
4467 if (itr == internallyRequestedStreams.end()) {
4468 LOGE("Error Internally Requested Stream list is empty");
4469 assert(0);
4470 } else {
4471 itr->need_metadata = 0;
4472 itr->meteringOnly = 1;
4473 }
4474
4475 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4476 request->frame_number = internalFrameNumber;
4477 processCaptureRequest(request, internallyRequestedStreams);
4478
4479 itr = internallyRequestedStreams.begin();
4480 if (itr == internallyRequestedStreams.end()) {
4481 ALOGE("Error Internally Requested Stream list is empty");
4482 assert(0);
4483 } else {
4484 itr->need_metadata = 1;
4485 itr->meteringOnly = 0;
4486 }
4487
4488 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4489 request->frame_number = internalFrameNumber;
4490 processCaptureRequest(request, internallyRequestedStreams);
4491
4492 /* Capture 2X frame*/
4493 modified_meta = modified_settings;
4494 expCompensation = GB_HDR_2X_STEP_EV;
4495 aeLock = 1;
4496 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4497 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4498 modified_settings = modified_meta.release();
4499 request->settings = modified_settings;
4500
4501 itr = internallyRequestedStreams.begin();
4502 if (itr == internallyRequestedStreams.end()) {
4503 ALOGE("Error Internally Requested Stream list is empty");
4504 assert(0);
4505 } else {
4506 itr->need_metadata = 0;
4507 itr->meteringOnly = 1;
4508 }
4509 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4510 request->frame_number = internalFrameNumber;
4511 processCaptureRequest(request, internallyRequestedStreams);
4512
4513 itr = internallyRequestedStreams.begin();
4514 if (itr == internallyRequestedStreams.end()) {
4515 ALOGE("Error Internally Requested Stream list is empty");
4516 assert(0);
4517 } else {
4518 itr->need_metadata = 1;
4519 itr->meteringOnly = 0;
4520 }
4521
4522 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4523 request->frame_number = internalFrameNumber;
4524 processCaptureRequest(request, internallyRequestedStreams);
4525
4526
4527 /* Capture 2X on original streaming config*/
4528 internallyRequestedStreams.clear();
4529
4530 /* Restore original settings pointer */
4531 request->settings = original_settings;
4532 } else {
4533 uint32_t internalFrameNumber;
4534 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4535 request->frame_number = internalFrameNumber;
4536 return processCaptureRequest(request, internallyRequestedStreams);
4537 }
4538
4539 return NO_ERROR;
4540}
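
/*
 * Summary of the HDR snapshot expansion above (descriptive only): a framework
 * request that isHdrSnapshotRequest() accepts and that has no input buffer is
 * replayed as six processCaptureRequest() calls. Only the second call is
 * mapped back to the original framework frame number; the others are stored
 * against EMPTY_FRAMEWORK_FRAME_NUMBER:
 *
 *   1. metering-only internal request at GB_HDR_HALF_STEP_EV (AE settling)
 *   2. the framework-visible capture, still at GB_HDR_HALF_STEP_EV
 *   3. metering-only internal request at 0 EV (AE settling)
 *   4. internal BLOB capture at 0 EV with need_metadata set
 *   5. metering-only internal request at GB_HDR_2X_STEP_EV (AE settling)
 *   6. internal BLOB capture at GB_HDR_2X_STEP_EV with need_metadata set
 *
 * Results and notifies for the internal frame numbers are later dropped in
 * orchestrateResult() and orchestrateNotify().
 */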
4541
4542/*===========================================================================
4543 * FUNCTION : orchestrateResult
4544 *
4545 * DESCRIPTION: Orchestrates a capture result to camera service
4546 *
4547 * PARAMETERS :
4548 * @request : request from framework to process
 *   @result : capture result to send to camera service
4550 * RETURN :
4551 *
4552 *==========================================================================*/
4553void QCamera3HardwareInterface::orchestrateResult(
4554 camera3_capture_result_t *result)
4555{
4556 uint32_t frameworkFrameNumber;
4557 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4558 frameworkFrameNumber);
4559 if (rc != NO_ERROR) {
4560 LOGE("Cannot find translated frameworkFrameNumber");
4561 assert(0);
4562 } else {
4563 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004564 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004565 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004566 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004567 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4568 camera_metadata_entry_t entry;
4569 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4570 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004571 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004572 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4573 if (ret != OK)
4574 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004575 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004576 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004577 result->frame_number = frameworkFrameNumber;
4578 mCallbackOps->process_capture_result(mCallbackOps, result);
4579 }
4580 }
4581}
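
/*
 * Note on the metadata rewrite above (descriptive only): if the result
 * metadata contains ANDROID_SYNC_FRAME_NUMBER, that entry is overwritten with
 * the translated framework frame number, so the value handed to
 * process_capture_result() is consistent with result->frame_number as seen by
 * the framework.
 */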
4582
4583/*===========================================================================
4584 * FUNCTION : orchestrateNotify
4585 *
4586 * DESCRIPTION: Orchestrates a notify to camera service
4587 *
4588 * PARAMETERS :
 *   @notify_msg : notify message to send to camera service
4590 *
4591 * RETURN :
4592 *
4593 *==========================================================================*/
4594void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4595{
4596 uint32_t frameworkFrameNumber;
4597 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004598 int32_t rc = NO_ERROR;
4599
4600 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004602
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004603 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004604 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4605 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4606 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004607 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004608 LOGE("Cannot find translated frameworkFrameNumber");
4609 assert(0);
4610 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004611 }
4612 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004613
4614 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4615 LOGD("Internal Request drop the notifyCb");
4616 } else {
4617 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4618 mCallbackOps->notify(mCallbackOps, notify_msg);
4619 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004620}
4621
4622/*===========================================================================
4623 * FUNCTION : FrameNumberRegistry
4624 *
4625 * DESCRIPTION: Constructor
4626 *
4627 * PARAMETERS :
4628 *
4629 * RETURN :
4630 *
4631 *==========================================================================*/
4632FrameNumberRegistry::FrameNumberRegistry()
4633{
4634 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4635}
4636
4637/*===========================================================================
4638 * FUNCTION : ~FrameNumberRegistry
4639 *
4640 * DESCRIPTION: Destructor
4641 *
4642 * PARAMETERS :
4643 *
4644 * RETURN :
4645 *
4646 *==========================================================================*/
4647FrameNumberRegistry::~FrameNumberRegistry()
4648{
4649}
4650
4651/*===========================================================================
4652 * FUNCTION : PurgeOldEntriesLocked
4653 *
 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4655 *
4656 * PARAMETERS :
4657 *
4658 * RETURN : NONE
4659 *
4660 *==========================================================================*/
4661void FrameNumberRegistry::purgeOldEntriesLocked()
4662{
4663 while (_register.begin() != _register.end()) {
4664 auto itr = _register.begin();
4665 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4666 _register.erase(itr);
4667 } else {
4668 return;
4669 }
4670 }
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : allocStoreInternalFrameNumber
4675 *
4676 * DESCRIPTION: Method to note down a framework request and associate a new
4677 * internal request number against it
4678 *
4679 * PARAMETERS :
4680 * @fFrameNumber: Identifier given by framework
4681 * @internalFN : Output parameter which will have the newly generated internal
4682 * entry
4683 *
4684 * RETURN : Error code
4685 *
4686 *==========================================================================*/
4687int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4688 uint32_t &internalFrameNumber)
4689{
4690 Mutex::Autolock lock(mRegistryLock);
4691 internalFrameNumber = _nextFreeInternalNumber++;
4692 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4693 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4694 purgeOldEntriesLocked();
4695 return NO_ERROR;
4696}
4697
4698/*===========================================================================
4699 * FUNCTION : generateStoreInternalFrameNumber
4700 *
4701 * DESCRIPTION: Method to associate a new internal request number independent
 * DESCRIPTION: Method to generate a new internal request number independent
 *              of any association with framework requests
4704 * PARAMETERS :
 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4706 *
4707 *
4708 * RETURN : Error code
4709 *
4710 *==========================================================================*/
4711int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4712{
4713 Mutex::Autolock lock(mRegistryLock);
4714 internalFrameNumber = _nextFreeInternalNumber++;
4715 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4716 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4717 purgeOldEntriesLocked();
4718 return NO_ERROR;
4719}
4720
4721/*===========================================================================
4722 * FUNCTION : getFrameworkFrameNumber
4723 *
4724 * DESCRIPTION: Method to query the framework framenumber given an internal #
 * DESCRIPTION: Method to query the framework frame number given an internal one
4726 * PARAMETERS :
4727 * @internalFrame#: Internal reference
4728 * @frameworkframenumber: Output parameter holding framework frame entry
4729 *
4730 * RETURN : Error code
4731 *
4732 *==========================================================================*/
4733int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4734 uint32_t &frameworkFrameNumber)
4735{
4736 Mutex::Autolock lock(mRegistryLock);
4737 auto itr = _register.find(internalFrameNumber);
4738 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004739 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004740 return -ENOENT;
4741 }
4742
4743 frameworkFrameNumber = itr->second;
4744 purgeOldEntriesLocked();
4745 return NO_ERROR;
4746}
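
/*
 * Illustrative usage sketch of FrameNumberRegistry (hypothetical caller code,
 * not part of this file's control flow). The registry decouples framework
 * frame numbers from the internal ones handed to the backend:
 *
 *   FrameNumberRegistry db;
 *   uint32_t internalFn, frameworkFn;
 *
 *   // Request path: a framework request gets a fresh internal number.
 *   db.allocStoreInternalFrameNumber(requestFrameNumber, internalFn);
 *
 *   // Purely internal requests (e.g. HDR metering frames) are stored against
 *   // EMPTY_FRAMEWORK_FRAME_NUMBER and are never reported to the framework.
 *   db.generateStoreInternalFrameNumber(internalFn);
 *
 *   // Result path: translate back before calling process_capture_result().
 *   if (db.getFrameworkFrameNumber(internalFn, frameworkFn) == NO_ERROR &&
 *           frameworkFn != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // forward the result under frameworkFn
 *   }
 */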
Thierry Strudel3d639192016-09-09 11:52:26 -07004747
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004748status_t QCamera3HardwareInterface::fillPbStreamConfig(
4749 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4750 QCamera3Channel *channel, uint32_t streamIndex) {
4751 if (config == nullptr) {
4752 LOGE("%s: config is null", __FUNCTION__);
4753 return BAD_VALUE;
4754 }
4755
4756 if (channel == nullptr) {
4757 LOGE("%s: channel is null", __FUNCTION__);
4758 return BAD_VALUE;
4759 }
4760
4761 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4762 if (stream == nullptr) {
4763 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4764 return NAME_NOT_FOUND;
4765 }
4766
4767 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4768 if (streamInfo == nullptr) {
4769 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4770 return NAME_NOT_FOUND;
4771 }
4772
4773 config->id = pbStreamId;
4774 config->image.width = streamInfo->dim.width;
4775 config->image.height = streamInfo->dim.height;
4776 config->image.padding = 0;
4777 config->image.format = pbStreamFormat;
4778
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004779 uint32_t totalPlaneSize = 0;
4780
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004781 // Fill plane information.
4782 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4783 pbcamera::PlaneConfiguration plane;
4784 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4785 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4786 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004787
4788 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004789 }
4790
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004791 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004792 return OK;
4793}
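
/*
 * Worked example for the padding computation above (hypothetical numbers, for
 * illustration only): for a two-plane stream where plane 0 has stride 1920
 * and scanline 1088, and plane 1 has stride 1920 and scanline 544,
 *   totalPlaneSize = 1920*1088 + 1920*544 = 3133440 bytes.
 * If the backend reports frame_len = 3137536 for that stream, then
 *   config->image.padding = 3137536 - 3133440 = 4096 bytes.
 */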
4794
Thierry Strudel3d639192016-09-09 11:52:26 -07004795/*===========================================================================
4796 * FUNCTION : processCaptureRequest
4797 *
4798 * DESCRIPTION: process a capture request from camera service
4799 *
4800 * PARAMETERS :
4801 * @request : request from framework to process
4802 *
4803 * RETURN :
4804 *
4805 *==========================================================================*/
4806int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004807 camera3_capture_request_t *request,
4808 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004809{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004810 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 int rc = NO_ERROR;
4812 int32_t request_id;
4813 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 bool isVidBufRequested = false;
4815 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004816 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004817
4818 pthread_mutex_lock(&mMutex);
4819
4820 // Validate current state
4821 switch (mState) {
4822 case CONFIGURED:
4823 case STARTED:
4824 /* valid state */
4825 break;
4826
4827 case ERROR:
4828 pthread_mutex_unlock(&mMutex);
4829 handleCameraDeviceError();
4830 return -ENODEV;
4831
4832 default:
4833 LOGE("Invalid state %d", mState);
4834 pthread_mutex_unlock(&mMutex);
4835 return -ENODEV;
4836 }
4837
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004838 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004839 if (rc != NO_ERROR) {
4840 LOGE("incoming request is not valid");
4841 pthread_mutex_unlock(&mMutex);
4842 return rc;
4843 }
4844
4845 meta = request->settings;
4846
4847 // For first capture request, send capture intent, and
4848 // stream on all streams
4849 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004850 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004851 // send an unconfigure to the backend so that the isp
4852 // resources are deallocated
4853 if (!mFirstConfiguration) {
4854 cam_stream_size_info_t stream_config_info;
4855 int32_t hal_version = CAM_HAL_V3;
4856 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4857 stream_config_info.buffer_info.min_buffers =
4858 MIN_INFLIGHT_REQUESTS;
4859 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004860 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004861 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004862 clear_metadata_buffer(mParameters);
4863 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4864 CAM_INTF_PARM_HAL_VERSION, hal_version);
4865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4866 CAM_INTF_META_STREAM_INFO, stream_config_info);
4867 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4868 mParameters);
4869 if (rc < 0) {
4870 LOGE("set_parms for unconfigure failed");
4871 pthread_mutex_unlock(&mMutex);
4872 return rc;
4873 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004874
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004876 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 property_get("persist.camera.is_type", is_type_value, "4");
4881 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4882 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4883 property_get("persist.camera.is_type_preview", is_type_value, "4");
4884 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4885 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004886
4887 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4888 int32_t hal_version = CAM_HAL_V3;
4889 uint8_t captureIntent =
4890 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4891 mCaptureIntent = captureIntent;
4892 clear_metadata_buffer(mParameters);
4893 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4894 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4895 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004896 if (mFirstConfiguration) {
4897 // configure instant AEC
4898 // Instant AEC is a session based parameter and it is needed only
4899 // once per complete session after open camera.
4900 // i.e. This is set only once for the first capture request, after open camera.
4901 setInstantAEC(meta);
4902 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004903 uint8_t fwkVideoStabMode=0;
4904 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4905 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4906 }
4907
        // If the EIS setprop is enabled, turn EIS on only for video/preview streams.
4909 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004910 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 int32_t vsMode;
4912 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4914 rc = BAD_VALUE;
4915 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004916 LOGD("setEis %d", setEis);
4917 bool eis3Supported = false;
4918 size_t count = IS_TYPE_MAX;
4919 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4920 for (size_t i = 0; i < count; i++) {
4921 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4922 eis3Supported = true;
4923 break;
4924 }
4925 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004926
4927 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4930 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4932 is_type = isTypePreview;
4933 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4934 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4935 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004936 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 } else {
4938 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 } else {
4941 is_type = IS_TYPE_NONE;
4942 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004945 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4946 }
4947 }
4948
4949 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4950 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4951
Thierry Strudel54dc9782017-02-15 12:12:10 -08004952 //Disable tintless only if the property is set to 0
4953 memset(prop, 0, sizeof(prop));
4954 property_get("persist.camera.tintless.enable", prop, "1");
4955 int32_t tintless_value = atoi(prop);
4956
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4958 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004959
Thierry Strudel3d639192016-09-09 11:52:26 -07004960 //Disable CDS for HFR mode or if DIS/EIS is on.
4961 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4962 //after every configure_stream
4963 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4964 (m_bIsVideo)) {
4965 int32_t cds = CAM_CDS_MODE_OFF;
4966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4967 CAM_INTF_PARM_CDS_MODE, cds))
4968 LOGE("Failed to disable CDS for HFR mode");
4969
4970 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004971
4972 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4973 uint8_t* use_av_timer = NULL;
4974
4975 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004976 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 use_av_timer = &m_debug_avtimer;
4978 }
4979 else{
4980 use_av_timer =
4981 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004982 if (use_av_timer) {
4983 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4984 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004985 }
4986
4987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4988 rc = BAD_VALUE;
4989 }
4990 }
4991
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 setMobicat();
4993
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004994 uint8_t nrMode = 0;
4995 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4996 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4997 }
4998
Thierry Strudel3d639192016-09-09 11:52:26 -07004999 /* Set fps and hfr mode while sending meta stream info so that sensor
5000 * can configure appropriate streaming mode */
5001 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005002 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5003 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5005 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006 if (rc == NO_ERROR) {
5007 int32_t max_fps =
5008 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005009 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005010 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5011 }
5012 /* For HFR, more buffers are dequeued upfront to improve the performance */
5013 if (mBatchSize) {
5014 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5015 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5016 }
5017 }
5018 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005019 LOGE("setHalFpsRange failed");
5020 }
5021 }
5022 if (meta.exists(ANDROID_CONTROL_MODE)) {
5023 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5024 rc = extractSceneMode(meta, metaMode, mParameters);
5025 if (rc != NO_ERROR) {
5026 LOGE("extractSceneMode failed");
5027 }
5028 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005029 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005030
Thierry Strudel04e026f2016-10-10 11:27:36 -07005031 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5032 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5033 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5034 rc = setVideoHdrMode(mParameters, vhdr);
5035 if (rc != NO_ERROR) {
5036 LOGE("setVideoHDR is failed");
5037 }
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005040 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005041 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005042 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005043 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5045 sensorModeFullFov)) {
5046 rc = BAD_VALUE;
5047 }
5048 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 //TODO: validate the arguments, HSV scenemode should have only the
5050 //advertised fps ranges
5051
        /* Set the capture intent, HAL version, tintless, stream info,
         * and DIS enable parameters to the backend */
5054 LOGD("set_parms META_STREAM_INFO " );
5055 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005056 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5057 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 mStreamConfigInfo.type[i],
5059 mStreamConfigInfo.stream_sizes[i].width,
5060 mStreamConfigInfo.stream_sizes[i].height,
5061 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005062 mStreamConfigInfo.format[i],
5063 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005064 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5067 mParameters);
5068 if (rc < 0) {
5069 LOGE("set_parms failed for hal version, stream info");
5070 }
5071
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005072 cam_sensor_mode_info_t sensorModeInfo = {};
5073 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005074 if (rc != NO_ERROR) {
5075 LOGE("Failed to get sensor output size");
5076 pthread_mutex_unlock(&mMutex);
5077 goto error_exit;
5078 }
5079
5080 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5081 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005082 sensorModeInfo.active_array_size.width,
5083 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
5085 /* Set batchmode before initializing channel. Since registerBuffer
5086 * internally initializes some of the channels, better set batchmode
5087 * even before first register buffer */
5088 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5089 it != mStreamInfo.end(); it++) {
5090 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5091 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5092 && mBatchSize) {
5093 rc = channel->setBatchSize(mBatchSize);
5094 //Disable per frame map unmap for HFR/batchmode case
5095 rc |= channel->setPerFrameMapUnmap(false);
5096 if (NO_ERROR != rc) {
5097 LOGE("Channel init failed %d", rc);
5098 pthread_mutex_unlock(&mMutex);
5099 goto error_exit;
5100 }
5101 }
5102 }
5103
5104 //First initialize all streams
5105 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5106 it != mStreamInfo.end(); it++) {
5107 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005108
5109 /* Initial value of NR mode is needed before stream on */
5110 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005111 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5112 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005113 setEis) {
5114 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5115 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5116 is_type = mStreamConfigInfo.is_type[i];
5117 break;
5118 }
5119 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005121 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 rc = channel->initialize(IS_TYPE_NONE);
5123 }
5124 if (NO_ERROR != rc) {
5125 LOGE("Channel initialization failed %d", rc);
5126 pthread_mutex_unlock(&mMutex);
5127 goto error_exit;
5128 }
5129 }
5130
5131 if (mRawDumpChannel) {
5132 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5133 if (rc != NO_ERROR) {
5134 LOGE("Error: Raw Dump Channel init failed");
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
5138 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005139 if (mHdrPlusRawSrcChannel) {
5140 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5141 if (rc != NO_ERROR) {
5142 LOGE("Error: HDR+ RAW Source Channel init failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005147 if (mSupportChannel) {
5148 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5149 if (rc < 0) {
5150 LOGE("Support channel initialization failed");
5151 pthread_mutex_unlock(&mMutex);
5152 goto error_exit;
5153 }
5154 }
5155 if (mAnalysisChannel) {
5156 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5157 if (rc < 0) {
5158 LOGE("Analysis channel initialization failed");
5159 pthread_mutex_unlock(&mMutex);
5160 goto error_exit;
5161 }
5162 }
5163 if (mDummyBatchChannel) {
5164 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5165 if (rc < 0) {
5166 LOGE("mDummyBatchChannel setBatchSize failed");
5167 pthread_mutex_unlock(&mMutex);
5168 goto error_exit;
5169 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005170 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 if (rc < 0) {
5172 LOGE("mDummyBatchChannel initialization failed");
5173 pthread_mutex_unlock(&mMutex);
5174 goto error_exit;
5175 }
5176 }
5177
5178 // Set bundle info
5179 rc = setBundleInfo();
5180 if (rc < 0) {
5181 LOGE("setBundleInfo failed %d", rc);
5182 pthread_mutex_unlock(&mMutex);
5183 goto error_exit;
5184 }
5185
5186 //update settings from app here
5187 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5188 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5189 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5190 }
5191 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5192 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5193 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5194 }
5195 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5196 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5197 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5198
5199 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5200 (mLinkedCameraId != mCameraId) ) {
5201 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5202 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005203 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 goto error_exit;
5205 }
5206 }
5207
5208 // add bundle related cameras
5209 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5210 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005211 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5212 &m_pDualCamCmdPtr->bundle_info;
5213 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 if (mIsDeviceLinked)
5215 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5216 else
5217 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5218
5219 pthread_mutex_lock(&gCamLock);
5220
5221 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5222 LOGE("Dualcam: Invalid Session Id ");
5223 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 goto error_exit;
5226 }
5227
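        // Configure this camera as primary (Bayer) or secondary (mono aux) in the
        // dual camera bundle; both sides use the same 3A follow sync mode.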
5228 if (mIsMainCamera == 1) {
5229 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5230 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005231 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005232 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005233 // related session id should be session id of linked session
5234 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5235 } else {
5236 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5237 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005238 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005239 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005240 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5241 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005242 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 pthread_mutex_unlock(&gCamLock);
5244
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005245 rc = mCameraHandle->ops->set_dual_cam_cmd(
5246 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005247 if (rc < 0) {
5248 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005249 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 goto error_exit;
5251 }
5252 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 goto no_error;
5254error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005255 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 return rc;
5257no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 mWokenUpByDaemon = false;
5259 mPendingLiveRequest = 0;
5260 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 }
5262
5263 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005264 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005265
5266 if (mFlushPerf) {
5267 //we cannot accept any requests during flush
5268 LOGE("process_capture_request cannot proceed during flush");
5269 pthread_mutex_unlock(&mMutex);
5270 return NO_ERROR; //should return an error
5271 }
5272
5273 if (meta.exists(ANDROID_REQUEST_ID)) {
5274 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5275 mCurrentRequestId = request_id;
5276 LOGD("Received request with id: %d", request_id);
5277 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5278 LOGE("Unable to find request id field, \
5279 & no previous id available");
5280 pthread_mutex_unlock(&mMutex);
5281 return NAME_NOT_FOUND;
5282 } else {
5283 LOGD("Re-using old request id");
5284 request_id = mCurrentRequestId;
5285 }
5286
5287 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5288 request->num_output_buffers,
5289 request->input_buffer,
5290 frameNumber);
5291 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005292 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005294 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 uint32_t snapshotStreamId = 0;
5296 for (size_t i = 0; i < request->num_output_buffers; i++) {
5297 const camera3_stream_buffer_t& output = request->output_buffers[i];
5298 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5299
Emilian Peev7650c122017-01-19 08:24:33 -08005300 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5301 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005302 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005303 blob_request = 1;
5304 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5305 }
5306
5307 if (output.acquire_fence != -1) {
5308 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5309 close(output.acquire_fence);
5310 if (rc != OK) {
5311 LOGE("sync wait failed %d", rc);
5312 pthread_mutex_unlock(&mMutex);
5313 return rc;
5314 }
5315 }
5316
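        // Depth blob buffers are serviced by the depth channel directly and are not
        // added to the backend stream request list.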
Emilian Peev0f3c3162017-03-15 12:57:46 +00005317 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5318 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005319 depthRequestPresent = true;
5320 continue;
5321 }
5322
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005323 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005324 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005325
5326 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5327 isVidBufRequested = true;
5328 }
5329 }
5330
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005331    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5332 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5333 itr++) {
5334 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5335 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5336 channel->getStreamID(channel->getStreamTypeMask());
5337
5338 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5339 isVidBufRequested = true;
5340 }
5341 }
5342
Thierry Strudel3d639192016-09-09 11:52:26 -07005343 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005344 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005345 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005346 }
5347 if (blob_request && mRawDumpChannel) {
5348 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005349 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005351 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 }
5353
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005354 {
5355 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5356 // Request a RAW buffer if
5357 // 1. mHdrPlusRawSrcChannel is valid.
5358 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5359 // 3. There is no pending HDR+ request.
5360 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5361 mHdrPlusPendingRequests.size() == 0) {
5362 streamsArray.stream_request[streamsArray.num_streams].streamID =
5363 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5364 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5365 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005366 }
5367
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005368 //extract capture intent
5369 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5370 mCaptureIntent =
5371 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5372 }
5373
5374 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5375 mCacMode =
5376 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5377 }
5378
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005379 uint8_t requestedLensShadingMapMode;
5380 // Get the shading map mode.
5381 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5382 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5383 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5384 } else {
5385 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5386 }
5387
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005389 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005391 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005392 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005393 // If this request has a still capture intent, try to submit an HDR+ request.
5394 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5395 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5396 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5397 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005398 }
5399
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005400 if (hdrPlusRequest) {
5401 // For a HDR+ request, just set the frame parameters.
5402 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5403 if (rc < 0) {
5404 LOGE("fail to set frame parameters");
5405 pthread_mutex_unlock(&mMutex);
5406 return rc;
5407 }
5408 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005409 /* Parse the settings:
5410 * - For every request in NORMAL MODE
5411 * - For every request in HFR mode during preview only case
5412 * - For first request of every batch in HFR mode during video
5413 * recording. In batchmode the same settings except frame number is
5414 * repeated in each request of the batch.
5415 */
5416 if (!mBatchSize ||
5417 (mBatchSize && !isVidBufRequested) ||
5418 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005419 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 if (rc < 0) {
5421 LOGE("fail to set frame parameters");
5422 pthread_mutex_unlock(&mMutex);
5423 return rc;
5424 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005425
5426 {
5427 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5428 // will be reported in result metadata.
5429 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5430 if (mHdrPlusModeEnabled) {
5431 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5432 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5433 }
5434 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005435 }
5436 /* For batchMode HFR, setFrameParameters is not called for every
5437 * request. But only frame number of the latest request is parsed.
5438 * Keep track of first and last frame numbers in a batch so that
5439 * metadata for the frame numbers of batch can be duplicated in
5440         * handleBatchMetadata */
5441 if (mBatchSize) {
5442 if (!mToBeQueuedVidBufs) {
5443 //start of the batch
5444 mFirstFrameNumberInBatch = request->frame_number;
5445 }
5446 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5447 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5448 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005449 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005450 return BAD_VALUE;
5451 }
5452 }
5453 if (mNeedSensorRestart) {
5454 /* Unlock the mutex as restartSensor waits on the channels to be
5455 * stopped, which in turn calls stream callback functions -
5456 * handleBufferWithLock and handleMetadataWithLock */
5457 pthread_mutex_unlock(&mMutex);
5458 rc = dynamicUpdateMetaStreamInfo();
5459 if (rc != NO_ERROR) {
5460 LOGE("Restarting the sensor failed");
5461 return BAD_VALUE;
5462 }
5463 mNeedSensorRestart = false;
5464 pthread_mutex_lock(&mMutex);
5465 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005466 if(mResetInstantAEC) {
5467 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5468 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5469 mResetInstantAEC = false;
5470 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005471 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005472 if (request->input_buffer->acquire_fence != -1) {
5473 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5474 close(request->input_buffer->acquire_fence);
5475 if (rc != OK) {
5476 LOGE("input buffer sync wait failed %d", rc);
5477 pthread_mutex_unlock(&mMutex);
5478 return rc;
5479 }
5480 }
5481 }
5482
5483 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5484 mLastCustIntentFrmNum = frameNumber;
5485 }
5486 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005487 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005488 pendingRequestIterator latestRequest;
5489 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005490 pendingRequest.num_buffers = depthRequestPresent ?
5491 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005492 pendingRequest.request_id = request_id;
5493 pendingRequest.blob_request = blob_request;
5494 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005495 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005496 if (request->input_buffer) {
5497 pendingRequest.input_buffer =
5498 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5499 *(pendingRequest.input_buffer) = *(request->input_buffer);
5500 pInputBuffer = pendingRequest.input_buffer;
5501 } else {
5502 pendingRequest.input_buffer = NULL;
5503 pInputBuffer = NULL;
5504 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005505 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005506
5507 pendingRequest.pipeline_depth = 0;
5508 pendingRequest.partial_result_cnt = 0;
5509 extractJpegMetadata(mCurJpegMeta, request);
5510 pendingRequest.jpegMetadata = mCurJpegMeta;
5511 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005512 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005513 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5514 mHybridAeEnable =
5515 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5516 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005517
5518 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5519 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005520 /* DevCamDebug metadata processCaptureRequest */
5521 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5522 mDevCamDebugMetaEnable =
5523 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5524 }
5525 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5526 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005527
5528 //extract CAC info
5529 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5530 mCacMode =
5531 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5532 }
5533 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005534 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005535 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5536 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005537
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005538 // extract enableZsl info
5539 if (gExposeEnableZslKey) {
5540 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5541 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5542 mZslEnabled = pendingRequest.enableZsl;
5543 } else {
5544 pendingRequest.enableZsl = mZslEnabled;
5545 }
5546 }
5547
Thierry Strudel3d639192016-09-09 11:52:26 -07005548 PendingBuffersInRequest bufsForCurRequest;
5549 bufsForCurRequest.frame_number = frameNumber;
5550 // Mark current timestamp for the new request
5551 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005552 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005553
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005554 if (hdrPlusRequest) {
5555 // Save settings for this request.
5556 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5557 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5558
5559 // Add to pending HDR+ request queue.
5560 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5561 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5562
5563 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5564 }
5565
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005567 if ((request->output_buffers[i].stream->data_space ==
5568 HAL_DATASPACE_DEPTH) &&
5569 (HAL_PIXEL_FORMAT_BLOB ==
5570 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005571 continue;
5572 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005573 RequestedBufferInfo requestedBuf;
5574 memset(&requestedBuf, 0, sizeof(requestedBuf));
5575 requestedBuf.stream = request->output_buffers[i].stream;
5576 requestedBuf.buffer = NULL;
5577 pendingRequest.buffers.push_back(requestedBuf);
5578
5579        // Add the buffer handle to the pending buffers list
5580 PendingBufferInfo bufferInfo;
5581 bufferInfo.buffer = request->output_buffers[i].buffer;
5582 bufferInfo.stream = request->output_buffers[i].stream;
5583 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5584 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5585 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5586 frameNumber, bufferInfo.buffer,
5587 channel->getStreamTypeMask(), bufferInfo.stream->format);
5588 }
5589 // Add this request packet into mPendingBuffersMap
5590 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5591 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5592 mPendingBuffersMap.get_num_overall_buffers());
5593
5594 latestRequest = mPendingRequestsList.insert(
5595 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005596
5597 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5598 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005599 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005600 for (size_t i = 0; i < request->num_output_buffers; i++) {
5601 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5602 }
5603
Thierry Strudel3d639192016-09-09 11:52:26 -07005604 if(mFlush) {
5605 LOGI("mFlush is true");
5606 pthread_mutex_unlock(&mMutex);
5607 return NO_ERROR;
5608 }
5609
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5611 // channel.
5612 if (!hdrPlusRequest) {
5613 int indexUsed;
5614 // Notify metadata channel we receive a request
5615 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005616
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005617 if(request->input_buffer != NULL){
5618 LOGD("Input request, frame_number %d", frameNumber);
5619 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5620 if (NO_ERROR != rc) {
5621 LOGE("fail to set reproc parameters");
5622 pthread_mutex_unlock(&mMutex);
5623 return rc;
5624 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005625 }
5626
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005627 // Call request on other streams
5628 uint32_t streams_need_metadata = 0;
5629 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5630 for (size_t i = 0; i < request->num_output_buffers; i++) {
5631 const camera3_stream_buffer_t& output = request->output_buffers[i];
5632 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5633
5634 if (channel == NULL) {
5635 LOGW("invalid channel pointer for stream");
5636 continue;
5637 }
5638
5639 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5640 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5641 output.buffer, request->input_buffer, frameNumber);
5642 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005643 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005644 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5645 if (rc < 0) {
5646 LOGE("Fail to request on picture channel");
5647 pthread_mutex_unlock(&mMutex);
5648 return rc;
5649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005650 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005651 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5652 assert(NULL != mDepthChannel);
5653 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005654
Emilian Peev7650c122017-01-19 08:24:33 -08005655 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5656 if (rc < 0) {
5657 LOGE("Fail to map on depth buffer");
5658 pthread_mutex_unlock(&mMutex);
5659 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005660 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005661 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005662 } else {
5663 LOGD("snapshot request with buffer %p, frame_number %d",
5664 output.buffer, frameNumber);
5665 if (!request->settings) {
5666 rc = channel->request(output.buffer, frameNumber,
5667 NULL, mPrevParameters, indexUsed);
5668 } else {
5669 rc = channel->request(output.buffer, frameNumber,
5670 NULL, mParameters, indexUsed);
5671 }
5672 if (rc < 0) {
5673 LOGE("Fail to request on picture channel");
5674 pthread_mutex_unlock(&mMutex);
5675 return rc;
5676 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005677
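                            // Record the buffer index the channel assigned for this stream
                            // so the backend request references the right buffer
                            // (free-running index in constrained high-speed mode).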
Emilian Peev7650c122017-01-19 08:24:33 -08005678 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5679 uint32_t j = 0;
5680 for (j = 0; j < streamsArray.num_streams; j++) {
5681 if (streamsArray.stream_request[j].streamID == streamId) {
5682 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5683 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5684 else
5685 streamsArray.stream_request[j].buf_index = indexUsed;
5686 break;
5687 }
5688 }
5689 if (j == streamsArray.num_streams) {
5690 LOGE("Did not find matching stream to update index");
5691 assert(0);
5692 }
5693
5694 pendingBufferIter->need_metadata = true;
5695 streams_need_metadata++;
5696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005697 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005698 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5699 bool needMetadata = false;
5700 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5701 rc = yuvChannel->request(output.buffer, frameNumber,
5702 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5703 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005704 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005705 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005706 pthread_mutex_unlock(&mMutex);
5707 return rc;
5708 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005709
5710 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5711 uint32_t j = 0;
5712 for (j = 0; j < streamsArray.num_streams; j++) {
5713 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005714 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5715 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5716 else
5717 streamsArray.stream_request[j].buf_index = indexUsed;
5718 break;
5719 }
5720 }
5721 if (j == streamsArray.num_streams) {
5722 LOGE("Did not find matching stream to update index");
5723 assert(0);
5724 }
5725
5726 pendingBufferIter->need_metadata = needMetadata;
5727 if (needMetadata)
5728 streams_need_metadata += 1;
5729 LOGD("calling YUV channel request, need_metadata is %d",
5730 needMetadata);
5731 } else {
5732 LOGD("request with buffer %p, frame_number %d",
5733 output.buffer, frameNumber);
5734
5735 rc = channel->request(output.buffer, frameNumber, indexUsed);
5736
5737 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5738 uint32_t j = 0;
5739 for (j = 0; j < streamsArray.num_streams; j++) {
5740 if (streamsArray.stream_request[j].streamID == streamId) {
5741 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5742 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5743 else
5744 streamsArray.stream_request[j].buf_index = indexUsed;
5745 break;
5746 }
5747 }
5748 if (j == streamsArray.num_streams) {
5749 LOGE("Did not find matching stream to update index");
5750 assert(0);
5751 }
5752
5753 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5754 && mBatchSize) {
5755 mToBeQueuedVidBufs++;
5756 if (mToBeQueuedVidBufs == mBatchSize) {
5757 channel->queueBatchBuf();
5758 }
5759 }
5760 if (rc < 0) {
5761 LOGE("request failed");
5762 pthread_mutex_unlock(&mMutex);
5763 return rc;
5764 }
5765 }
5766 pendingBufferIter++;
5767 }
5768
5769 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5770 itr++) {
5771 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5772
5773 if (channel == NULL) {
5774 LOGE("invalid channel pointer for stream");
5775 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005776 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005777 return BAD_VALUE;
5778 }
5779
5780 InternalRequest requestedStream;
5781 requestedStream = (*itr);
5782
5783
5784 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5785 LOGD("snapshot request internally input buffer %p, frame_number %d",
5786 request->input_buffer, frameNumber);
5787 if(request->input_buffer != NULL){
5788 rc = channel->request(NULL, frameNumber,
5789 pInputBuffer, &mReprocMeta, indexUsed, true,
5790 requestedStream.meteringOnly);
5791 if (rc < 0) {
5792 LOGE("Fail to request on picture channel");
5793 pthread_mutex_unlock(&mMutex);
5794 return rc;
5795 }
5796 } else {
5797 LOGD("snapshot request with frame_number %d", frameNumber);
5798 if (!request->settings) {
5799 rc = channel->request(NULL, frameNumber,
5800 NULL, mPrevParameters, indexUsed, true,
5801 requestedStream.meteringOnly);
5802 } else {
5803 rc = channel->request(NULL, frameNumber,
5804 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5805 }
5806 if (rc < 0) {
5807 LOGE("Fail to request on picture channel");
5808 pthread_mutex_unlock(&mMutex);
5809 return rc;
5810 }
5811
5812 if ((*itr).meteringOnly != 1) {
5813 requestedStream.need_metadata = 1;
5814 streams_need_metadata++;
5815 }
5816 }
5817
5818 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5819 uint32_t j = 0;
5820 for (j = 0; j < streamsArray.num_streams; j++) {
5821 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005822 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5823 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5824 else
5825 streamsArray.stream_request[j].buf_index = indexUsed;
5826 break;
5827 }
5828 }
5829 if (j == streamsArray.num_streams) {
5830 LOGE("Did not find matching stream to update index");
5831 assert(0);
5832 }
5833
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005834 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005835 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005836 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005837 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005838 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005839 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005840 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005841 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005843 //If 2 streams have need_metadata set to true, fail the request, unless
5844 //we copy/reference count the metadata buffer
5845 if (streams_need_metadata > 1) {
5846            LOGE("not supporting request in which two streams require"
5847 " 2 HAL metadata for reprocessing");
5848 pthread_mutex_unlock(&mMutex);
5849 return -EINVAL;
5850 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005851
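    // PD (phase detection) data handling: with a depth stream configured, sensor PD
    // data defaults to SKIP and can be switched to ENABLED per request via the
    // experimental PD_DATA_ENABLE setting; without a depth stream it stays DISABLED.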
Emilian Peev656e4fa2017-06-02 16:47:04 +01005852 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5853 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5854 if (depthRequestPresent && mDepthChannel) {
5855 if (request->settings) {
5856 camera_metadata_ro_entry entry;
5857 if (find_camera_metadata_ro_entry(request->settings,
5858 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5859 if (entry.data.u8[0]) {
5860 pdafEnable = CAM_PD_DATA_ENABLED;
5861 } else {
5862 pdafEnable = CAM_PD_DATA_SKIP;
5863 }
5864 mDepthCloudMode = pdafEnable;
5865 } else {
5866 pdafEnable = mDepthCloudMode;
5867 }
5868 } else {
5869 pdafEnable = mDepthCloudMode;
5870 }
5871 }
5872
Emilian Peev7650c122017-01-19 08:24:33 -08005873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5874 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5875 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5876 pthread_mutex_unlock(&mMutex);
5877 return BAD_VALUE;
5878 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005879
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005880 if (request->input_buffer == NULL) {
5881 /* Set the parameters to backend:
5882 * - For every request in NORMAL MODE
5883 * - For every request in HFR mode during preview only case
5884 * - Once every batch in HFR mode during video recording
5885 */
5886 if (!mBatchSize ||
5887 (mBatchSize && !isVidBufRequested) ||
5888 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5889 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5890 mBatchSize, isVidBufRequested,
5891 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005892
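            // In HFR batch mode, accumulate the stream requests of every request in the
            // batch into mBatchedStreamsArray so a single set_parms covers the whole batch.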
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5894 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5895 uint32_t m = 0;
5896 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5897 if (streamsArray.stream_request[k].streamID ==
5898 mBatchedStreamsArray.stream_request[m].streamID)
5899 break;
5900 }
5901 if (m == mBatchedStreamsArray.num_streams) {
5902 mBatchedStreamsArray.stream_request\
5903 [mBatchedStreamsArray.num_streams].streamID =
5904 streamsArray.stream_request[k].streamID;
5905 mBatchedStreamsArray.stream_request\
5906 [mBatchedStreamsArray.num_streams].buf_index =
5907 streamsArray.stream_request[k].buf_index;
5908 mBatchedStreamsArray.num_streams =
5909 mBatchedStreamsArray.num_streams + 1;
5910 }
5911 }
5912 streamsArray = mBatchedStreamsArray;
5913 }
5914 /* Update stream id of all the requested buffers */
5915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5916 streamsArray)) {
5917 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005918 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005919 return BAD_VALUE;
5920 }
5921
5922 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5923 mParameters);
5924 if (rc < 0) {
5925 LOGE("set_parms failed");
5926 }
5927            /* reset to zero because the batch is queued */
5928 mToBeQueuedVidBufs = 0;
5929 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5930 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5931 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005932 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5933 uint32_t m = 0;
5934 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5935 if (streamsArray.stream_request[k].streamID ==
5936 mBatchedStreamsArray.stream_request[m].streamID)
5937 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005938 }
5939 if (m == mBatchedStreamsArray.num_streams) {
5940 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5941 streamID = streamsArray.stream_request[k].streamID;
5942 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5943 buf_index = streamsArray.stream_request[k].buf_index;
5944 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005946 }
5947 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005948 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005949
5950 // Start all streams after the first setting is sent, so that the
5951 // setting can be applied sooner: (0 + apply_delay)th frame.
5952 if (mState == CONFIGURED && mChannelHandle) {
5953 //Then start them.
5954 LOGH("Start META Channel");
5955 rc = mMetadataChannel->start();
5956 if (rc < 0) {
5957 LOGE("META channel start failed");
5958 pthread_mutex_unlock(&mMutex);
5959 return rc;
5960 }
5961
5962 if (mAnalysisChannel) {
5963 rc = mAnalysisChannel->start();
5964 if (rc < 0) {
5965 LOGE("Analysis channel start failed");
5966 mMetadataChannel->stop();
5967 pthread_mutex_unlock(&mMutex);
5968 return rc;
5969 }
5970 }
5971
5972 if (mSupportChannel) {
5973 rc = mSupportChannel->start();
5974 if (rc < 0) {
5975 LOGE("Support channel start failed");
5976 mMetadataChannel->stop();
5977 /* Although support and analysis are mutually exclusive today
5978                       adding it in any case for future-proofing */
5979 if (mAnalysisChannel) {
5980 mAnalysisChannel->stop();
5981 }
5982 pthread_mutex_unlock(&mMutex);
5983 return rc;
5984 }
5985 }
5986 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5987 it != mStreamInfo.end(); it++) {
5988 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5989 LOGH("Start Processing Channel mask=%d",
5990 channel->getStreamTypeMask());
5991 rc = channel->start();
5992 if (rc < 0) {
5993 LOGE("channel start failed");
5994 pthread_mutex_unlock(&mMutex);
5995 return rc;
5996 }
5997 }
5998
5999 if (mRawDumpChannel) {
6000 LOGD("Starting raw dump stream");
6001 rc = mRawDumpChannel->start();
6002 if (rc != NO_ERROR) {
6003 LOGE("Error Starting Raw Dump Channel");
6004 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6005 it != mStreamInfo.end(); it++) {
6006 QCamera3Channel *channel =
6007 (QCamera3Channel *)(*it)->stream->priv;
6008 LOGH("Stopping Processing Channel mask=%d",
6009 channel->getStreamTypeMask());
6010 channel->stop();
6011 }
6012 if (mSupportChannel)
6013 mSupportChannel->stop();
6014 if (mAnalysisChannel) {
6015 mAnalysisChannel->stop();
6016 }
6017 mMetadataChannel->stop();
6018 pthread_mutex_unlock(&mMutex);
6019 return rc;
6020 }
6021 }
6022
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006023 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006024 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006025 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006026 if (rc != NO_ERROR) {
6027 LOGE("start_channel failed %d", rc);
6028 pthread_mutex_unlock(&mMutex);
6029 return rc;
6030 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006031
6032 {
6033 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006034 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006035
6036 // Now that sensor mode should have been selected, get the selected sensor mode
6037 // info.
6038 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6039 getCurrentSensorModeInfo(mSensorModeInfo);
6040
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006041 if (EaselManagerClientOpened) {
6042 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006043 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6044 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006045 if (rc != OK) {
6046 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6047 mCameraId, mSensorModeInfo.op_pixel_clk);
6048 pthread_mutex_unlock(&mMutex);
6049 return rc;
6050 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006051 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006052 }
6053 }
6054
6055 // Start sensor streaming.
6056 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6057 mChannelHandle);
6058 if (rc != NO_ERROR) {
6059 LOGE("start_sensor_stream_on failed %d", rc);
6060 pthread_mutex_unlock(&mMutex);
6061 return rc;
6062 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006063 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006064 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 }
6066
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006067 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006068 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006069 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006070 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006071 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6072 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6073 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6074 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006075
6076 if (isSessionHdrPlusModeCompatible()) {
6077 rc = enableHdrPlusModeLocked();
6078 if (rc != OK) {
6079 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006083 }
6084
6085 mFirstPreviewIntentSeen = true;
6086 }
6087 }
6088
Thierry Strudel3d639192016-09-09 11:52:26 -07006089 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6090
6091 mState = STARTED;
6092 // Added a timed condition wait
6093 struct timespec ts;
6094 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006095 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006096 if (rc < 0) {
6097 isValidTimeout = 0;
6098 LOGE("Error reading the real time clock!!");
6099 }
6100 else {
6101        // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006102 int64_t timeout = 5;
6103 {
6104 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6105 // If there is a pending HDR+ request, the following requests may be blocked until the
6106 // HDR+ request is done. So allow a longer timeout.
6107 if (mHdrPlusPendingRequests.size() > 0) {
6108 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6109 }
6110 }
6111 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006112 }
6113    //Block on the condition variable while too many requests are still in flight
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006114 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006115 (mState != ERROR) && (mState != DEINIT)) {
6116 if (!isValidTimeout) {
6117 LOGD("Blocking on conditional wait");
6118 pthread_cond_wait(&mRequestCond, &mMutex);
6119 }
6120 else {
6121 LOGD("Blocking on timed conditional wait");
6122 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6123 if (rc == ETIMEDOUT) {
6124 rc = -ENODEV;
6125 LOGE("Unblocked on timeout!!!!");
6126 break;
6127 }
6128 }
6129 LOGD("Unblocked");
6130 if (mWokenUpByDaemon) {
6131 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006132 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 break;
6134 }
6135 }
6136 pthread_mutex_unlock(&mMutex);
6137
6138 return rc;
6139}
6140
6141/*===========================================================================
6142 * FUNCTION : dump
6143 *
6144 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and the
6145 *              pending frame drop list) to the given file descriptor
6146 *
6147 * PARAMETERS :
6148 *   @fd : file descriptor to write the dump to
6149 * RETURN     : None
6150 *==========================================================================*/
6151void QCamera3HardwareInterface::dump(int fd)
6152{
6153 pthread_mutex_lock(&mMutex);
6154 dprintf(fd, "\n Camera HAL3 information Begin \n");
6155
6156 dprintf(fd, "\nNumber of pending requests: %zu \n",
6157 mPendingRequestsList.size());
6158 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6159 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6160 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6161 for(pendingRequestIterator i = mPendingRequestsList.begin();
6162 i != mPendingRequestsList.end(); i++) {
6163 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6164 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6165 i->input_buffer);
6166 }
6167 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6168 mPendingBuffersMap.get_num_overall_buffers());
6169 dprintf(fd, "-------+------------------\n");
6170 dprintf(fd, " Frame | Stream type mask \n");
6171 dprintf(fd, "-------+------------------\n");
6172 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6173 for(auto &j : req.mPendingBufferList) {
6174 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6175 dprintf(fd, " %5d | %11d \n",
6176 req.frame_number, channel->getStreamTypeMask());
6177 }
6178 }
6179 dprintf(fd, "-------+------------------\n");
6180
6181 dprintf(fd, "\nPending frame drop list: %zu\n",
6182 mPendingFrameDropList.size());
6183 dprintf(fd, "-------+-----------\n");
6184 dprintf(fd, " Frame | Stream ID \n");
6185 dprintf(fd, "-------+-----------\n");
6186 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6187 i != mPendingFrameDropList.end(); i++) {
6188 dprintf(fd, " %5d | %9d \n",
6189 i->frame_number, i->stream_ID);
6190 }
6191 dprintf(fd, "-------+-----------\n");
6192
6193 dprintf(fd, "\n Camera HAL3 information End \n");
6194
6195 /* use dumpsys media.camera as trigger to send update debug level event */
6196 mUpdateDebugLevel = true;
6197 pthread_mutex_unlock(&mMutex);
6198 return;
6199}
6200
6201/*===========================================================================
6202 * FUNCTION : flush
6203 *
6204 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6205 * conditionally restarts channels
6206 *
6207 * PARAMETERS :
6208 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006209 * @ stopChannelImmediately: stop the channel immediately. This should be used
6210 *                            when the device encountered an error and MIPI may
6211 *                            have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006212 *
6213 * RETURN :
6214 * 0 on success
6215 * Error code on failure
6216 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006217int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006218{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006219 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006220 int32_t rc = NO_ERROR;
6221
6222 LOGD("Unblocking Process Capture Request");
6223 pthread_mutex_lock(&mMutex);
6224 mFlush = true;
6225 pthread_mutex_unlock(&mMutex);
6226
6227 rc = stopAllChannels();
6228 // unlink of dualcam
6229 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006230 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6231 &m_pDualCamCmdPtr->bundle_info;
6232 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006233 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6234 pthread_mutex_lock(&gCamLock);
6235
6236 if (mIsMainCamera == 1) {
6237 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6238 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006239 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006240 // related session id should be session id of linked session
6241 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6242 } else {
6243 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6244 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006245 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006246 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6247 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006248 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006249 pthread_mutex_unlock(&gCamLock);
6250
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006251 rc = mCameraHandle->ops->set_dual_cam_cmd(
6252 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006253 if (rc < 0) {
6254 LOGE("Dualcam: Unlink failed, but still proceed to close");
6255 }
6256 }
6257
6258 if (rc < 0) {
6259 LOGE("stopAllChannels failed");
6260 return rc;
6261 }
6262 if (mChannelHandle) {
6263 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006264 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006265 }
6266
6267 // Reset bundle info
6268 rc = setBundleInfo();
6269 if (rc < 0) {
6270 LOGE("setBundleInfo failed %d", rc);
6271 return rc;
6272 }
6273
6274 // Mutex Lock
6275 pthread_mutex_lock(&mMutex);
6276
6277 // Unblock process_capture_request
6278 mPendingLiveRequest = 0;
6279 pthread_cond_signal(&mRequestCond);
6280
6281 rc = notifyErrorForPendingRequests();
6282 if (rc < 0) {
6283 LOGE("notifyErrorForPendingRequests failed");
6284 pthread_mutex_unlock(&mMutex);
6285 return rc;
6286 }
6287
6288 mFlush = false;
6289
6290 // Start the Streams/Channels
6291 if (restartChannels) {
6292 rc = startAllChannels();
6293 if (rc < 0) {
6294 LOGE("startAllChannels failed");
6295 pthread_mutex_unlock(&mMutex);
6296 return rc;
6297 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006298 if (mChannelHandle) {
6299 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006300 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006301 if (rc < 0) {
6302 LOGE("start_channel failed");
6303 pthread_mutex_unlock(&mMutex);
6304 return rc;
6305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006306 }
6307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006308 pthread_mutex_unlock(&mMutex);
6309
6310 return 0;
6311}
6312
6313/*===========================================================================
6314 * FUNCTION : flushPerf
6315 *
6316 * DESCRIPTION: This is the performance optimization version of flush that does
6317 * not use stream off, rather flushes the system
6318 *
6319 * PARAMETERS :
6320 *
6321 *
6322 * RETURN : 0 : success
6323 * -EINVAL: input is malformed (device is not valid)
6324 * -ENODEV: if the device has encountered a serious error
6325 *==========================================================================*/
6326int QCamera3HardwareInterface::flushPerf()
6327{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006328 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006329 int32_t rc = 0;
6330 struct timespec timeout;
6331 bool timed_wait = false;
6332
6333 pthread_mutex_lock(&mMutex);
6334 mFlushPerf = true;
6335 mPendingBuffersMap.numPendingBufsAtFlush =
6336 mPendingBuffersMap.get_num_overall_buffers();
6337 LOGD("Calling flush. Wait for %d buffers to return",
6338 mPendingBuffersMap.numPendingBufsAtFlush);
6339
6340 /* send the flush event to the backend */
6341 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6342 if (rc < 0) {
6343 LOGE("Error in flush: IOCTL failure");
6344 mFlushPerf = false;
6345 pthread_mutex_unlock(&mMutex);
6346 return -ENODEV;
6347 }
6348
6349 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6350 LOGD("No pending buffers in HAL, return flush");
6351 mFlushPerf = false;
6352 pthread_mutex_unlock(&mMutex);
6353 return rc;
6354 }
6355
6356 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006357 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006358 if (rc < 0) {
6359 LOGE("Error reading the real time clock, cannot use timed wait");
6360 } else {
6361 timeout.tv_sec += FLUSH_TIMEOUT;
6362 timed_wait = true;
6363 }
6364
6365 //Block on conditional variable
6366 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6367 LOGD("Waiting on mBuffersCond");
6368 if (!timed_wait) {
6369 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6370 if (rc != 0) {
6371 LOGE("pthread_cond_wait failed due to rc = %s",
6372 strerror(rc));
6373 break;
6374 }
6375 } else {
6376 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6377 if (rc != 0) {
6378 LOGE("pthread_cond_timedwait failed due to rc = %s",
6379 strerror(rc));
6380 break;
6381 }
6382 }
6383 }
6384 if (rc != 0) {
6385 mFlushPerf = false;
6386 pthread_mutex_unlock(&mMutex);
6387 return -ENODEV;
6388 }
6389
6390 LOGD("Received buffers, now safe to return them");
6391
6392 //make sure the channels handle flush
6393 //currently only required for the picture channel to release snapshot resources
6394 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6395 it != mStreamInfo.end(); it++) {
6396 QCamera3Channel *channel = (*it)->channel;
6397 if (channel) {
6398 rc = channel->flush();
6399 if (rc) {
6400 LOGE("Flushing the channels failed with error %d", rc);
6401 // even though the channel flush failed we need to continue and
6402                // Even though the channel flush failed, we need to continue and
6403                // return the buffers we have to the framework; however, the return
6404                // value will be an error.
6405 }
6406 }
6407 }
6408
6409 /* notify the frameworks and send errored results */
6410 rc = notifyErrorForPendingRequests();
6411 if (rc < 0) {
6412 LOGE("notifyErrorForPendingRequests failed");
6413 pthread_mutex_unlock(&mMutex);
6414 return rc;
6415 }
6416
6417 //unblock process_capture_request
6418 mPendingLiveRequest = 0;
6419 unblockRequestIfNecessary();
6420
6421 mFlushPerf = false;
6422 pthread_mutex_unlock(&mMutex);
6423 LOGD ("Flush Operation complete. rc = %d", rc);
6424 return rc;
6425}
6426
6427/*===========================================================================
6428 * FUNCTION : handleCameraDeviceError
6429 *
6430 * DESCRIPTION: This function calls internal flush and notifies the error to
6431 * framework and updates the state variable.
6432 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006433 * PARAMETERS :
6434 * @stopChannelImmediately : stop channels immediately without waiting for
6435 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006436 *
6437 * RETURN : NO_ERROR on Success
6438 * Error code on failure
6439 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006440int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006441{
6442 int32_t rc = NO_ERROR;
6443
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006444 {
6445 Mutex::Autolock lock(mFlushLock);
6446 pthread_mutex_lock(&mMutex);
6447 if (mState != ERROR) {
6448 //if mState != ERROR, nothing to be done
6449 pthread_mutex_unlock(&mMutex);
6450 return NO_ERROR;
6451 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006453
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006454 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006455 if (NO_ERROR != rc) {
6456 LOGE("internal flush to handle mState = ERROR failed");
6457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006458
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006459 pthread_mutex_lock(&mMutex);
6460 mState = DEINIT;
6461 pthread_mutex_unlock(&mMutex);
6462 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006463
6464 camera3_notify_msg_t notify_msg;
6465 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6466 notify_msg.type = CAMERA3_MSG_ERROR;
6467 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6468 notify_msg.message.error.error_stream = NULL;
6469 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006470 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006471
6472 return rc;
6473}
6474
6475/*===========================================================================
6476 * FUNCTION : captureResultCb
6477 *
6478 * DESCRIPTION: Callback handler for all capture result
6479 * (streams, as well as metadata)
6480 *
6481 * PARAMETERS :
6482 * @metadata : metadata information
6483 * @buffer : actual gralloc buffer to be returned to frameworks.
6484 * NULL if metadata.
6485 *
6486 * RETURN : NONE
6487 *==========================================================================*/
6488void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6489 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6490{
6491 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006492 pthread_mutex_lock(&mMutex);
6493 uint8_t batchSize = mBatchSize;
6494 pthread_mutex_unlock(&mMutex);
6495 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006496 handleBatchMetadata(metadata_buf,
6497 true /* free_and_bufdone_meta_buf */);
6498 } else { /* mBatchSize = 0 */
6499 hdrPlusPerfLock(metadata_buf);
6500 pthread_mutex_lock(&mMutex);
6501 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006502 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006503 true /* last urgent frame of batch metadata */,
6504 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006505 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006506 pthread_mutex_unlock(&mMutex);
6507 }
6508 } else if (isInputBuffer) {
6509 pthread_mutex_lock(&mMutex);
6510 handleInputBufferWithLock(frame_number);
6511 pthread_mutex_unlock(&mMutex);
6512 } else {
6513 pthread_mutex_lock(&mMutex);
6514 handleBufferWithLock(buffer, frame_number);
6515 pthread_mutex_unlock(&mMutex);
6516 }
6517 return;
6518}
6519
6520/*===========================================================================
6521 * FUNCTION : getReprocessibleOutputStreamId
6522 *
6523 * DESCRIPTION: Get source output stream id for the input reprocess stream
6524 * based on size and format, which would be the largest
6525 * output stream if an input stream exists.
6526 *
6527 * PARAMETERS :
6528 * @id : return the stream id if found
6529 *
6530 * RETURN : int32_t type of status
6531 * NO_ERROR -- success
6532 * non-zero failure code
6533 *==========================================================================*/
6534int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6535{
6536 /* Check if any output or bidirectional stream exists with the same size and
6537 format as the input stream, and return that stream */
6538 if ((mInputStreamInfo.dim.width > 0) &&
6539 (mInputStreamInfo.dim.height > 0)) {
6540 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6541 it != mStreamInfo.end(); it++) {
6542
6543 camera3_stream_t *stream = (*it)->stream;
6544 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6545 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6546 (stream->format == mInputStreamInfo.format)) {
6547 // Usage flag for an input stream and the source output stream
6548 // may be different.
6549 LOGD("Found reprocessible output stream! %p", *it);
6550 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6551 stream->usage, mInputStreamInfo.usage);
6552
6553 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6554 if (channel != NULL && channel->mStreams[0]) {
6555 id = channel->mStreams[0]->getMyServerID();
6556 return NO_ERROR;
6557 }
6558 }
6559 }
6560 } else {
6561 LOGD("No input stream, so no reprocessible output stream");
6562 }
6563 return NAME_NOT_FOUND;
6564}
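/* Minimal caller sketch (illustrative only, not the original call site): the id
 * returned above is the server stream id of the output stream that feeds the
 * reprocess/input path when an input stream is configured.
 *
 *     uint32_t reprocSrcStreamId = 0;
 *     if (NO_ERROR == getReprocessibleOutputStreamId(reprocSrcStreamId)) {
 *         // use reprocSrcStreamId as the source for the reprocess channel
 *     }
 */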
6565
6566/*===========================================================================
6567 * FUNCTION : lookupFwkName
6568 *
6569 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6570 * make sure the parameter is correctly propagated
6571 *
6572 * PARAMETERS :
6573 * @arr : map between the two enums
6574 * @len : len of the map
6575 * @hal_name : name of the hal_parm to map
6576 *
6577 * RETURN : int type of status
6578 * fwk_name -- success
6579 * non-zero failure code
6580 *==========================================================================*/
6581template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6582 size_t len, halType hal_name)
6583{
6584
6585 for (size_t i = 0; i < len; i++) {
6586 if (arr[i].hal_name == hal_name) {
6587 return arr[i].fwk_name;
6588 }
6589 }
6590
6591 /* Not finding a matching framework type is not necessarily
6592 * an error case. This happens when mm-camera supports more attributes
6593 * than the framework does */
6594 LOGH("Cannot find matching framework type");
6595 return NAME_NOT_FOUND;
6596}
6597
6598/*===========================================================================
6599 * FUNCTION : lookupHalName
6600 *
6601 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6602 * make sure the parameter is correctly propagated
6603 *
6604 * PARAMETERS :
6605 * @arr : map between the two enums
6606 * @len : len of the map
6607 * @fwk_name : name of the framework parameter to map
6608 *
6609 * RETURN : int32_t type of status
6610 * hal_name -- success
6611 * non-zero failure code
6612 *==========================================================================*/
6613template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6614 size_t len, fwkType fwk_name)
6615{
6616 for (size_t i = 0; i < len; i++) {
6617 if (arr[i].fwk_name == fwk_name) {
6618 return arr[i].hal_name;
6619 }
6620 }
6621
6622 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6623 return NAME_NOT_FOUND;
6624}
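/* Usage sketch for the two lookup helpers above. This is illustrative only:
 * the struct and map below are hypothetical stand-ins for the real mapping
 * tables (e.g. FLASH_MODES_MAP) defined elsewhere in this HAL; callers in
 * this file check the result against NAME_NOT_FOUND before using it.
 *
 *     typedef struct {
 *         uint8_t fwk_name;   // framework-side enum value
 *         int     hal_name;   // backend/HAL-side enum value
 *     } ExampleEnumMap;
 *
 *     static const ExampleEnumMap EXAMPLE_MAP[] = {
 *         { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF    },
 *         { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
 *     };
 *
 *     // backend -> framework
 *     int val = lookupFwkName(EXAMPLE_MAP, METADATA_MAP_SIZE(EXAMPLE_MAP), halMode);
 *     if (NAME_NOT_FOUND != val) {
 *         uint8_t fwkMode = (uint8_t)val;
 *         // publish fwkMode via camMetadata.update(...)
 *     }
 *
 *     // framework -> backend
 *     int halVal = lookupHalName(EXAMPLE_MAP, METADATA_MAP_SIZE(EXAMPLE_MAP), fwkMode);
 */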
6625
6626/*===========================================================================
6627 * FUNCTION : lookupProp
6628 *
6629 * DESCRIPTION: lookup a value by its name
6630 *
6631 * PARAMETERS :
6632 * @arr : map between the two enums
6633 * @len : size of the map
6634 * @name : name to be looked up
6635 *
6636 * RETURN : Value if found
6637 * CAM_CDS_MODE_MAX if not found
6638 *==========================================================================*/
6639template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6640 size_t len, const char *name)
6641{
6642 if (name) {
6643 for (size_t i = 0; i < len; i++) {
6644 if (!strcmp(arr[i].desc, name)) {
6645 return arr[i].val;
6646 }
6647 }
6648 }
6649 return CAM_CDS_MODE_MAX;
6650}
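/* Illustrative sketch of how lookupProp is typically driven by an Android
 * system property (the property key and the CDS_MAP name below are assumptions
 * used for illustration; cutils/properties.h is already included by this file):
 *
 *     char prop[PROPERTY_VALUE_MAX];
 *     memset(prop, 0, sizeof(prop));
 *     property_get("persist.camera.CDS", prop, "Auto");
 *     cam_cds_mode_type_t cds_mode =
 *             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 *     if (CAM_CDS_MODE_MAX == cds_mode) {
 *         // property value not present in the map; fall back to a default
 *     }
 */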
6651
6652/*===========================================================================
6653 * FUNCTION   : translateFromHalMetadata
6654 * DESCRIPTION: Translate HAL metadata into the framework camera_metadata_t format
6655 *
6656 * PARAMETERS :
6657 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006658 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006659 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006660 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6661 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006662 *
6663 * RETURN : camera_metadata_t*
6664 * metadata in a format specified by fwk
6665 *==========================================================================*/
6666camera_metadata_t*
6667QCamera3HardwareInterface::translateFromHalMetadata(
6668 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006669 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006670 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006671 bool lastMetadataInBatch,
6672 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006673{
6674 CameraMetadata camMetadata;
6675 camera_metadata_t *resultMetadata;
6676
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006677 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006678 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6679 * Timestamp is needed because it's used for shutter notify calculation.
6680 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006681 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006682 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006683 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006684 }
6685
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006686 if (pendingRequest.jpegMetadata.entryCount())
6687 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006688
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006689 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6690 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6691 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6692 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6693 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006694 if (mBatchSize == 0) {
6695 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006696 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006697 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006698
Samuel Ha68ba5172016-12-15 18:41:12 -08006699 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6700 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006701 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006702 // DevCamDebug metadata translateFromHalMetadata AF
6703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6704 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6705 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6706 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6709 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6710 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6711 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6714 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6715 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6716 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6717 }
6718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6719 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6720 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6721 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6722 }
6723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6724 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6725 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6726 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6727 }
6728 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6729 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6730 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6731 *DevCamDebug_af_monitor_pdaf_target_pos;
6732 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6733 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6736 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6737 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6738 *DevCamDebug_af_monitor_pdaf_confidence;
6739 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6740 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6743 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6744 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6745 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6746 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6749 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6750 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6751 *DevCamDebug_af_monitor_tof_target_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6753 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6756 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6757 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6758 *DevCamDebug_af_monitor_tof_confidence;
6759 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6760 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6763 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6764 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6765 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6766 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6769 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6770 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6771 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6772 &fwk_DevCamDebug_af_monitor_type_select, 1);
6773 }
6774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6775 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6776 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6777 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6778 &fwk_DevCamDebug_af_monitor_refocus, 1);
6779 }
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6781 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6782 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6783 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6784 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6785 }
6786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6787 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6788 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6789 *DevCamDebug_af_search_pdaf_target_pos;
6790 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6791 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6792 }
6793 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6794 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6795 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6796 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6797 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6798 }
6799 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6800 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6801 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6802 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6803 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6804 }
6805 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6806 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6807 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6808 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6809 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6812 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6813 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6814 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6815 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6818 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6820 *DevCamDebug_af_search_tof_target_pos;
6821 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6822 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6825 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6826 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6827 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6828 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6831 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6832 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6833 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6834 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6837 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6838 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6839 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6840 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6841 }
6842 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6843 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6844 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6845 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6846 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6847 }
6848 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6849 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6850 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6851 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6852 &fwk_DevCamDebug_af_search_type_select, 1);
6853 }
6854 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6855 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6856 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6857 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6858 &fwk_DevCamDebug_af_search_next_pos, 1);
6859 }
6860 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6861 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6862 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6863 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6864 &fwk_DevCamDebug_af_search_target_pos, 1);
6865 }
6866 // DevCamDebug metadata translateFromHalMetadata AEC
6867 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6868 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6869 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6870 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6871 }
6872 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6873 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6874 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6875 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6876 }
6877 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6878 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6879 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6880 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6883 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6884 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6885 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6886 }
6887 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6888 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6889 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6890 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6891 }
6892 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6893 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6894 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6895 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6896 }
6897 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6898 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6899 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6900 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6903 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6904 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6905 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6906 }
Samuel Ha34229982017-02-17 13:51:11 -08006907 // DevCamDebug metadata translateFromHalMetadata zzHDR
6908 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6909 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6910 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6911 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6914 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006915 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006916 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6917 }
6918 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6919 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6920 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6921 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6922 }
6923 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6924 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006925 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006926 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6927 }
6928 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6929 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6930 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6931 *DevCamDebug_aec_hdr_sensitivity_ratio;
6932 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6933 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6934 }
6935 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6936 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6937 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6938 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6939 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6940 }
6941 // DevCamDebug metadata translateFromHalMetadata ADRC
6942 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6943 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6944 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6945 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6946 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6947 }
6948 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6949 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6950 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6951 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6952 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6953 }
6954 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6955 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6956 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6957 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6958 }
6959 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6960 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6961 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6962 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6963 }
6964 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6965 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6966 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6967 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6968 }
6969 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6970 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6971 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6972 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6973 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006974 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6975 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6976 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6977 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6978 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6979 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6980 }
6981 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6982 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6983 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6984 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6985 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6986 }
6987 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6988 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6989 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6990 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6991 &fwk_DevCamDebug_aec_subject_motion, 1);
6992 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006993 // DevCamDebug metadata translateFromHalMetadata AWB
6994 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6995 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6996 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6997 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6998 }
6999 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7000 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7001 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7002 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7003 }
7004 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7005 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7006 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7007 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7008 }
7009 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7010 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7011 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7012 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7013 }
7014 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7015 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7016 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7017 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7018 }
7019 }
7020 // atrace_end(ATRACE_TAG_ALWAYS);
7021
Thierry Strudel3d639192016-09-09 11:52:26 -07007022 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7023 int64_t fwk_frame_number = *frame_number;
7024 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7025 }
7026
7027 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7028 int32_t fps_range[2];
7029 fps_range[0] = (int32_t)float_range->min_fps;
7030 fps_range[1] = (int32_t)float_range->max_fps;
7031 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7032 fps_range, 2);
7033 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7034 fps_range[0], fps_range[1]);
7035 }
7036
7037 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7038 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7039 }
7040
7041 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7042 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7043 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7044 *sceneMode);
7045 if (NAME_NOT_FOUND != val) {
7046 uint8_t fwkSceneMode = (uint8_t)val;
7047 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7048 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7049 fwkSceneMode);
7050 }
7051 }
7052
7053 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7054 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7055 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7056 }
7057
7058 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7059 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7060 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7061 }
7062
7063 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7064 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7065 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7066 }
7067
7068 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7069 CAM_INTF_META_EDGE_MODE, metadata) {
7070 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7071 }
7072
7073 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7074 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7075 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7076 }
7077
7078 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7079 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7080 }
7081
7082 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7083 if (0 <= *flashState) {
7084 uint8_t fwk_flashState = (uint8_t) *flashState;
7085 if (!gCamCapability[mCameraId]->flash_available) {
7086 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7087 }
7088 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7089 }
7090 }
7091
7092 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7093 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7094 if (NAME_NOT_FOUND != val) {
7095 uint8_t fwk_flashMode = (uint8_t)val;
7096 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7097 }
7098 }
7099
7100 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7101 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7102 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7103 }
7104
7105 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7106 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7107 }
7108
7109 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7110 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7111 }
7112
7113 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7114 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7115 }
7116
7117 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7118 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7119 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7120 }
7121
7122 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7123 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7124 LOGD("fwk_videoStab = %d", fwk_videoStab);
7125 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7126 } else {
7127 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL,
7128 // so hardcode the video stabilization result to OFF mode.
7129 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7130 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007131 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007132 }
7133
7134 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7135 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7136 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7137 }
7138
7139 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7140 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7141 }
7142
Thierry Strudel3d639192016-09-09 11:52:26 -07007143 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7144 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007145 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007146
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007147 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7148 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007149
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007150 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007151 blackLevelAppliedPattern->cam_black_level[0],
7152 blackLevelAppliedPattern->cam_black_level[1],
7153 blackLevelAppliedPattern->cam_black_level[2],
7154 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007155 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7156 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007157
7158#ifndef USE_HAL_3_3
7159 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307160 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007161 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307162 fwk_blackLevelInd[0] /= 16.0;
7163 fwk_blackLevelInd[1] /= 16.0;
7164 fwk_blackLevelInd[2] /= 16.0;
7165 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007166 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7167 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007168#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007169 }
7170
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007171#ifndef USE_HAL_3_3
7172 // Fixed whitelevel is used by ISP/Sensor
7173 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7174 &gCamCapability[mCameraId]->white_level, 1);
7175#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007176
7177 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7178 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7179 int32_t scalerCropRegion[4];
7180 scalerCropRegion[0] = hScalerCropRegion->left;
7181 scalerCropRegion[1] = hScalerCropRegion->top;
7182 scalerCropRegion[2] = hScalerCropRegion->width;
7183 scalerCropRegion[3] = hScalerCropRegion->height;
7184
7185 // Adjust crop region from sensor output coordinate system to active
7186 // array coordinate system.
7187 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7188 scalerCropRegion[2], scalerCropRegion[3]);
7189
7190 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7191 }
7192
7193 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7194 LOGD("sensorExpTime = %lld", *sensorExpTime);
7195 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7196 }
7197
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007198 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7199 LOGD("expTimeBoost = %f", *expTimeBoost);
7200 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7201 }
7202
Thierry Strudel3d639192016-09-09 11:52:26 -07007203 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7204 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7205 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7206 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7207 }
7208
7209 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7210 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7211 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7212 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7213 sensorRollingShutterSkew, 1);
7214 }
7215
7216 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7217 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7218 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7219
7220 //calculate the noise profile based on sensitivity
7221 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7222 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7223 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7224 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7225 noise_profile[i] = noise_profile_S;
7226 noise_profile[i+1] = noise_profile_O;
7227 }
7228 LOGD("noise model entry (S, O) is (%f, %f)",
7229 noise_profile_S, noise_profile_O);
7230 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7231 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7232 }
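/* Note on the noise profile block above: entries are emitted as alternating
 * (S, O) pairs, one pair per color channel. Per the Android definition of
 * ANDROID_SENSOR_NOISE_PROFILE, these coefficients model the per-pixel noise
 * at the current sensitivity as variance ~= S * signal + O.
 */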
7233
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007234#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007235 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007237 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007238 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007239 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7240 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7241 }
7242 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007243#endif
7244
Thierry Strudel3d639192016-09-09 11:52:26 -07007245 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7246 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7247 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7248 }
7249
7250 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7251 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7252 *faceDetectMode);
7253 if (NAME_NOT_FOUND != val) {
7254 uint8_t fwk_faceDetectMode = (uint8_t)val;
7255 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7256
7257 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7258 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7259 CAM_INTF_META_FACE_DETECTION, metadata) {
7260 uint8_t numFaces = MIN(
7261 faceDetectionInfo->num_faces_detected, MAX_ROI);
7262 int32_t faceIds[MAX_ROI];
7263 uint8_t faceScores[MAX_ROI];
7264 int32_t faceRectangles[MAX_ROI * 4];
7265 int32_t faceLandmarks[MAX_ROI * 6];
7266 size_t j = 0, k = 0;
7267
7268 for (size_t i = 0; i < numFaces; i++) {
7269 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7270 // Adjust the face rectangle from the sensor output coordinate system to the
7271 // active array coordinate system.
7272 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7273 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7274 rect.width, rect.height);
7275
7276 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7277 faceRectangles+j, -1);
7278
Jason Lee8ce36fa2017-04-19 19:40:37 -07007279 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7280 "bottom-right (%d, %d)",
7281 faceDetectionInfo->frame_id, i,
7282 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7283 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7284
Thierry Strudel3d639192016-09-09 11:52:26 -07007285 j+= 4;
7286 }
7287 if (numFaces <= 0) {
7288 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7289 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7290 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7291 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7292 }
7293
7294 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7295 numFaces);
7296 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7297 faceRectangles, numFaces * 4U);
7298 if (fwk_faceDetectMode ==
7299 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7300 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7301 CAM_INTF_META_FACE_LANDMARK, metadata) {
7302
7303 for (size_t i = 0; i < numFaces; i++) {
7304 // Map the landmark coordinates from the sensor output coordinate system to the
7305 // active array coordinate system.
7306 mCropRegionMapper.toActiveArray(
7307 landmarks->face_landmarks[i].left_eye_center.x,
7308 landmarks->face_landmarks[i].left_eye_center.y);
7309 mCropRegionMapper.toActiveArray(
7310 landmarks->face_landmarks[i].right_eye_center.x,
7311 landmarks->face_landmarks[i].right_eye_center.y);
7312 mCropRegionMapper.toActiveArray(
7313 landmarks->face_landmarks[i].mouth_center.x,
7314 landmarks->face_landmarks[i].mouth_center.y);
7315
7316 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007317
7318 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7319 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7320 faceDetectionInfo->frame_id, i,
7321 faceLandmarks[k + LEFT_EYE_X],
7322 faceLandmarks[k + LEFT_EYE_Y],
7323 faceLandmarks[k + RIGHT_EYE_X],
7324 faceLandmarks[k + RIGHT_EYE_Y],
7325 faceLandmarks[k + MOUTH_X],
7326 faceLandmarks[k + MOUTH_Y]);
7327
Thierry Strudel04e026f2016-10-10 11:27:36 -07007328 k+= TOTAL_LANDMARK_INDICES;
7329 }
7330 } else {
7331 for (size_t i = 0; i < numFaces; i++) {
7332 setInvalidLandmarks(faceLandmarks+k);
7333 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 }
7335 }
7336
Jason Lee49619db2017-04-13 12:07:22 -07007337 for (size_t i = 0; i < numFaces; i++) {
7338 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7339
7340 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7341 faceDetectionInfo->frame_id, i, faceIds[i]);
7342 }
7343
Thierry Strudel3d639192016-09-09 11:52:26 -07007344 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7345 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7346 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007347 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007348 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7349 CAM_INTF_META_FACE_BLINK, metadata) {
7350 uint8_t detected[MAX_ROI];
7351 uint8_t degree[MAX_ROI * 2];
7352 for (size_t i = 0; i < numFaces; i++) {
7353 detected[i] = blinks->blink[i].blink_detected;
7354 degree[2 * i] = blinks->blink[i].left_blink;
7355 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007356
Jason Lee49619db2017-04-13 12:07:22 -07007357 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7358 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7359 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7360 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007361 }
7362 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7363 detected, numFaces);
7364 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7365 degree, numFaces * 2);
7366 }
7367 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7368 CAM_INTF_META_FACE_SMILE, metadata) {
7369 uint8_t degree[MAX_ROI];
7370 uint8_t confidence[MAX_ROI];
7371 for (size_t i = 0; i < numFaces; i++) {
7372 degree[i] = smiles->smile[i].smile_degree;
7373 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007374
Jason Lee49619db2017-04-13 12:07:22 -07007375 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7376 "smile_degree=%d, smile_score=%d",
7377 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007378 }
7379 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7380 degree, numFaces);
7381 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7382 confidence, numFaces);
7383 }
7384 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7385 CAM_INTF_META_FACE_GAZE, metadata) {
7386 int8_t angle[MAX_ROI];
7387 int32_t direction[MAX_ROI * 3];
7388 int8_t degree[MAX_ROI * 2];
7389 for (size_t i = 0; i < numFaces; i++) {
7390 angle[i] = gazes->gaze[i].gaze_angle;
7391 direction[3 * i] = gazes->gaze[i].updown_dir;
7392 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7393 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7394 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7395 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007396
7397 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7398 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7399 "left_right_gaze=%d, top_bottom_gaze=%d",
7400 faceDetectionInfo->frame_id, i, angle[i],
7401 direction[3 * i], direction[3 * i + 1],
7402 direction[3 * i + 2],
7403 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007404 }
7405 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7406 (uint8_t *)angle, numFaces);
7407 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7408 direction, numFaces * 3);
7409 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7410 (uint8_t *)degree, numFaces * 2);
7411 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007412 }
7413 }
7414 }
7415 }
7416
7417 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7418 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007419 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007420 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007422
Shuzhen Wang14415f52016-11-16 18:26:18 -08007423 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7424 histogramBins = *histBins;
7425 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7426 }
7427
7428 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007429 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7430 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007431 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007432
7433 switch (stats_data->type) {
7434 case CAM_HISTOGRAM_TYPE_BAYER:
7435 switch (stats_data->bayer_stats.data_type) {
7436 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007437 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7438 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007439 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007440 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7441 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007442 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007443 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7444 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007445 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007446 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007447 case CAM_STATS_CHANNEL_R:
7448 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007449 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7450 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007451 }
7452 break;
7453 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007454 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007455 break;
7456 }
7457
Shuzhen Wang14415f52016-11-16 18:26:18 -08007458 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007459 }
7460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007461 }
7462
7463 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7464 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7465 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7466 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7467 }
7468
7469 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7470 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7471 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7472 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7473 }
7474
7475 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7476 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7477 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7478 CAM_MAX_SHADING_MAP_HEIGHT);
7479 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7480 CAM_MAX_SHADING_MAP_WIDTH);
7481 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7482 lensShadingMap->lens_shading, 4U * map_width * map_height);
7483 }
7484
7485 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7486 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7487 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7488 }
7489
7490 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7491 // Populate the framework tonemap curves from CAM_INTF_META_TONEMAP_CURVES
7492 /* ch0 = G, ch1 = B, ch2 = R */
7493 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7494 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7495 tonemap->tonemap_points_cnt,
7496 CAM_MAX_TONEMAP_CURVE_SIZE);
7497 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7498 }
7499
7500 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7501 &tonemap->curves[0].tonemap_points[0][0],
7502 tonemap->tonemap_points_cnt * 2);
7503
7504 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7505 &tonemap->curves[1].tonemap_points[0][0],
7506 tonemap->tonemap_points_cnt * 2);
7507
7508 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7509 &tonemap->curves[2].tonemap_points[0][0],
7510 tonemap->tonemap_points_cnt * 2);
7511 }
7512
7513 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7514 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7515 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7516 CC_GAIN_MAX);
7517 }
7518
7519 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7520 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7521 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7522 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7523 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7524 }
7525
7526 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7527 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7528 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7529 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7530 toneCurve->tonemap_points_cnt,
7531 CAM_MAX_TONEMAP_CURVE_SIZE);
7532 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7533 }
7534 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7535 (float*)toneCurve->curve.tonemap_points,
7536 toneCurve->tonemap_points_cnt * 2);
7537 }
7538
7539 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7540 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7541 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7542 predColorCorrectionGains->gains, 4);
7543 }
7544
7545 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7546 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7547 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7548 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7549 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7550 }
7551
7552 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7553 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7554 }
7555
7556 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7557 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7558 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7559 }
7560
7561 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7562 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7563 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7564 }
7565
7566 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7567 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7568 *effectMode);
7569 if (NAME_NOT_FOUND != val) {
7570 uint8_t fwk_effectMode = (uint8_t)val;
7571 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7572 }
7573 }
7574
7575 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7576 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7577 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7578 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7579 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7580 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7581 }
7582 int32_t fwk_testPatternData[4];
7583 fwk_testPatternData[0] = testPatternData->r;
7584 fwk_testPatternData[3] = testPatternData->b;
7585 switch (gCamCapability[mCameraId]->color_arrangement) {
7586 case CAM_FILTER_ARRANGEMENT_RGGB:
7587 case CAM_FILTER_ARRANGEMENT_GRBG:
7588 fwk_testPatternData[1] = testPatternData->gr;
7589 fwk_testPatternData[2] = testPatternData->gb;
7590 break;
7591 case CAM_FILTER_ARRANGEMENT_GBRG:
7592 case CAM_FILTER_ARRANGEMENT_BGGR:
7593 fwk_testPatternData[2] = testPatternData->gr;
7594 fwk_testPatternData[1] = testPatternData->gb;
7595 break;
7596 default:
7597 LOGE("color arrangement %d is not supported",
7598 gCamCapability[mCameraId]->color_arrangement);
7599 break;
7600 }
7601 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7602 }
7603
7604 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7605 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7606 }
7607
7608 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7609 String8 str((const char *)gps_methods);
7610 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7611 }
7612
7613 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7614 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7615 }
7616
7617 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7618 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7619 }
7620
7621 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7622 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7623 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7624 }
7625
7626 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7627 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7628 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7629 }
7630
7631 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7632 int32_t fwk_thumb_size[2];
7633 fwk_thumb_size[0] = thumb_size->width;
7634 fwk_thumb_size[1] = thumb_size->height;
7635 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7636 }
7637
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007638 // Skip reprocess metadata if there is no input stream.
7639 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7640 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7641 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7642 privateData,
7643 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007645 }
7646
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007647 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007648 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007649 meteringMode, 1);
7650 }
7651
Thierry Strudel54dc9782017-02-15 12:12:10 -08007652 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7653 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7654 LOGD("hdr_scene_data: %d %f\n",
7655 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7656 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7657 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7658 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7659 &isHdr, 1);
7660 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7661 &isHdrConfidence, 1);
7662 }
7663
7664
7665
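    // The tuning blob is a flat byte array: a header of six uint32_t fields
    // (data version, then sensor/VFE/CPP/CAC/mod3 data sizes) followed by the
    // sensor, VFE, CPP and CAC payloads, each clamped to its TUNING_*_DATA_MAX
    // limit. dumpMetadataToFile() writes the same layout to disk.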
Thierry Strudel3d639192016-09-09 11:52:26 -07007666 if (metadata->is_tuning_params_valid) {
7667 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7668 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7669 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7670
7671
7672 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7673 sizeof(uint32_t));
7674 data += sizeof(uint32_t);
7675
7676 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7677 sizeof(uint32_t));
7678 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7679 data += sizeof(uint32_t);
7680
7681 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7682 sizeof(uint32_t));
7683 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7684 data += sizeof(uint32_t);
7685
7686 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7687 sizeof(uint32_t));
7688 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7689 data += sizeof(uint32_t);
7690
7691 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7692 sizeof(uint32_t));
7693 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7694 data += sizeof(uint32_t);
7695
7696 metadata->tuning_params.tuning_mod3_data_size = 0;
7697 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7698 sizeof(uint32_t));
7699 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7700 data += sizeof(uint32_t);
7701
7702 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7703 TUNING_SENSOR_DATA_MAX);
7704 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7705 count);
7706 data += count;
7707
7708 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7709 TUNING_VFE_DATA_MAX);
7710 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7711 count);
7712 data += count;
7713
7714 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7715 TUNING_CPP_DATA_MAX);
7716 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7717 count);
7718 data += count;
7719
7720 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7721 TUNING_CAC_DATA_MAX);
7722 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7723 count);
7724 data += count;
7725
7726 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7727 (int32_t *)(void *)tuning_meta_data_blob,
7728 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7729 }
7730
7731 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7732 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7733 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7734 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7735 NEUTRAL_COL_POINTS);
7736 }
7737
7738 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7739 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7740 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7741 }
7742
7743 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7744 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7745 // Adjust AE regions from the sensor output coordinate system to the
7746 // active array coordinate system.
7747 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7748 hAeRegions->rect.width, hAeRegions->rect.height);
7749
7750 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7751 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7752 REGIONS_TUPLE_COUNT);
7753 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7754 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7755 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7756 hAeRegions->rect.height);
7757 }
7758
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007759 if (!pendingRequest.focusStateSent) {
7760 if (pendingRequest.focusStateValid) {
7761 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7762 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007763 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007764 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7765 uint8_t fwk_afState = (uint8_t) *afState;
7766 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7767 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7768 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007769 }
7770 }
7771
Thierry Strudel3d639192016-09-09 11:52:26 -07007772 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7773 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7774 }
7775
7776 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7777 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7778 }
7779
7780 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7781 uint8_t fwk_lensState = *lensState;
7782 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7783 }
7784
Thierry Strudel3d639192016-09-09 11:52:26 -07007785 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007786 uint32_t ab_mode = *hal_ab_mode;
7787 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7788 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7789 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007791 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007792 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007793 if (NAME_NOT_FOUND != val) {
7794 uint8_t fwk_ab_mode = (uint8_t)val;
7795 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7796 }
7797 }
7798
7799 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7800 int val = lookupFwkName(SCENE_MODES_MAP,
7801 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7802 if (NAME_NOT_FOUND != val) {
7803 uint8_t fwkBestshotMode = (uint8_t)val;
7804 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7805 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7806 } else {
7807 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7808 }
7809 }
7810
7811 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7812 uint8_t fwk_mode = (uint8_t) *mode;
7813 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7814 }
7815
7816 /* Constant metadata values to be updated */
7817 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7818 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7819
7820 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7821 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7822
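    // Hot pixel map mode is reported as OFF above, so publish an empty hot
    // pixel map entry (count 0); no coordinates are attached.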
7823 int32_t hotPixelMap[2];
7824 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7825
7826 // CDS
7827 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7828 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7829 }
7830
Thierry Strudel04e026f2016-10-10 11:27:36 -07007831 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7832 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007833 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007834 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7835 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7836 } else {
7837 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7838 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007839
7840 if(fwk_hdr != curr_hdr_state) {
7841 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7842 if(fwk_hdr)
7843 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7844 else
7845 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7846 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007847 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7848 }
7849
Thierry Strudel54dc9782017-02-15 12:12:10 -08007850 //binning correction
7851 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7852 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7853 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7854 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7855 }
7856
Thierry Strudel04e026f2016-10-10 11:27:36 -07007857 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007858 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007859 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7860 int8_t is_ir_on = 0;
7861
7862 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7863 if(is_ir_on != curr_ir_state) {
7864 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7865 if(is_ir_on)
7866 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7867 else
7868 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7869 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007870 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007871 }
7872
Thierry Strudel269c81a2016-10-12 12:13:59 -07007873 // AEC SPEED
7874 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7875 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7876 }
7877
7878 // AWB SPEED
7879 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7880 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7881 }
7882
Thierry Strudel3d639192016-09-09 11:52:26 -07007883 // TNR
7884 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7885 uint8_t tnr_enable = tnr->denoise_enable;
7886 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007887 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7888 int8_t is_tnr_on = 0;
7889
7890 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7891 if(is_tnr_on != curr_tnr_state) {
7892 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7893 if(is_tnr_on)
7894 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7895 else
7896 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007898
7899 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7900 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7901 }
7902
7903 // Reprocess crop data
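    // Only the crop entry matching the reprocessible output stream is
    // published. When the HAL has already done internal post-processing
    // (pprocDone), the full input stream dimensions are reported instead of
    // the backend crop rectangle.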
7904 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7905 uint8_t cnt = crop_data->num_of_streams;
7906 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7907 // mm-qcamera-daemon only posts crop_data for streams that are
7908 // not linked to pproc, so the absence of valid crop metadata is
7909 // not necessarily an error case.
7910 LOGD("No valid crop metadata entries");
7911 } else {
7912 uint32_t reproc_stream_id;
7913 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7914 LOGD("No reprocessible stream found, ignore crop data");
7915 } else {
7916 int rc = NO_ERROR;
7917 Vector<int32_t> roi_map;
7918 int32_t *crop = new int32_t[cnt*4];
7919 if (NULL == crop) {
7920 rc = NO_MEMORY;
7921 }
7922 if (NO_ERROR == rc) {
7923 int32_t streams_found = 0;
7924 for (size_t i = 0; i < cnt; i++) {
7925 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7926 if (pprocDone) {
7927 // HAL already does internal reprocessing,
7928 // either via reprocessing before JPEG encoding,
7929 // or offline postprocessing for pproc bypass case.
7930 crop[0] = 0;
7931 crop[1] = 0;
7932 crop[2] = mInputStreamInfo.dim.width;
7933 crop[3] = mInputStreamInfo.dim.height;
7934 } else {
7935 crop[0] = crop_data->crop_info[i].crop.left;
7936 crop[1] = crop_data->crop_info[i].crop.top;
7937 crop[2] = crop_data->crop_info[i].crop.width;
7938 crop[3] = crop_data->crop_info[i].crop.height;
7939 }
7940 roi_map.add(crop_data->crop_info[i].roi_map.left);
7941 roi_map.add(crop_data->crop_info[i].roi_map.top);
7942 roi_map.add(crop_data->crop_info[i].roi_map.width);
7943 roi_map.add(crop_data->crop_info[i].roi_map.height);
7944 streams_found++;
7945 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7946 crop[0], crop[1], crop[2], crop[3]);
7947 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7948 crop_data->crop_info[i].roi_map.left,
7949 crop_data->crop_info[i].roi_map.top,
7950 crop_data->crop_info[i].roi_map.width,
7951 crop_data->crop_info[i].roi_map.height);
7952 break;
7953
7954 }
7955 }
7956 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7957 &streams_found, 1);
7958 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7959 crop, (size_t)(streams_found * 4));
7960 if (roi_map.array()) {
7961 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7962 roi_map.array(), roi_map.size());
7963 }
7964 }
7965 if (crop) {
7966 delete [] crop;
7967 }
7968 }
7969 }
7970 }
7971
7972 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7973 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7974 // so hard-code the CAC result to OFF mode.
7975 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7976 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7977 } else {
7978 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7979 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7980 *cacMode);
7981 if (NAME_NOT_FOUND != val) {
7982 uint8_t resultCacMode = (uint8_t)val;
7983 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7984 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007985 if (pendingRequest.fwkCacMode != resultCacMode) {
7986 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007987 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007988 //Check if CAC is disabled by property
7989 if (m_cacModeDisabled) {
7990 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7991 }
7992
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007993 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007994 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7995 } else {
7996 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7997 }
7998 }
7999 }
8000
8001 // Post blob of cam_cds_data through vendor tag.
8002 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8003 uint8_t cnt = cdsInfo->num_of_streams;
8004 cam_cds_data_t cdsDataOverride;
8005 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8006 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8007 cdsDataOverride.num_of_streams = 1;
8008 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8009 uint32_t reproc_stream_id;
8010 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8011 LOGD("No reprocessible stream found, ignore cds data");
8012 } else {
8013 for (size_t i = 0; i < cnt; i++) {
8014 if (cdsInfo->cds_info[i].stream_id ==
8015 reproc_stream_id) {
8016 cdsDataOverride.cds_info[0].cds_enable =
8017 cdsInfo->cds_info[i].cds_enable;
8018 break;
8019 }
8020 }
8021 }
8022 } else {
8023 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8024 }
8025 camMetadata.update(QCAMERA3_CDS_INFO,
8026 (uint8_t *)&cdsDataOverride,
8027 sizeof(cam_cds_data_t));
8028 }
8029
8030 // Ldaf calibration data
8031 if (!mLdafCalibExist) {
8032 IF_META_AVAILABLE(uint32_t, ldafCalib,
8033 CAM_INTF_META_LDAF_EXIF, metadata) {
8034 mLdafCalibExist = true;
8035 mLdafCalib[0] = ldafCalib[0];
8036 mLdafCalib[1] = ldafCalib[1];
8037 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8038 ldafCalib[0], ldafCalib[1]);
8039 }
8040 }
8041
Thierry Strudel54dc9782017-02-15 12:12:10 -08008042 // EXIF debug data through vendor tag
8043 /*
8044 * Mobicat Mask can assume 3 values:
8045 * 1 refers to Mobicat data,
8046 * 2 refers to Stats Debug and Exif Debug Data
8047 * 3 refers to Mobicat and Stats Debug Data
8048 * We want to make sure that we are sending Exif debug data
8049 * only when Mobicat Mask is 2.
8050 */
8051 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8052 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8053 (uint8_t *)(void *)mExifParams.debug_params,
8054 sizeof(mm_jpeg_debug_exif_params_t));
8055 }
8056
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008057 // Reprocess and DDM debug data through vendor tag
8058 cam_reprocess_info_t repro_info;
8059 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008060 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8061 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008062 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008063 }
8064 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8065 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008066 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008067 }
8068 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8069 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008070 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008071 }
8072 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8073 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008074 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008075 }
8076 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8077 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008078 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008079 }
8080 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008081 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008082 }
8083 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8084 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008085 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008086 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008087 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8088 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8089 }
8090 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8091 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8092 }
8093 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8094 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008095
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008096 // INSTANT AEC MODE
8097 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8098 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8099 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8100 }
8101
Shuzhen Wange763e802016-03-31 10:24:29 -07008102 // AF scene change
8103 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8104 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8105 }
8106
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008107 // Enable ZSL
8108 if (enableZsl != nullptr) {
8109 uint8_t value = *enableZsl ?
8110 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8111 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8112 }
8113
Xu Han821ea9c2017-05-23 09:00:40 -07008114 // OIS Data
8115 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8116 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8117 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8118 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8119 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8120 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8121 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8122 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8123 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8124 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8125 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8126 }
8127
Thierry Strudel3d639192016-09-09 11:52:26 -07008128 resultMetadata = camMetadata.release();
8129 return resultMetadata;
8130}
8131
8132/*===========================================================================
8133 * FUNCTION : saveExifParams
8134 *
8135 * DESCRIPTION: Cache 3A/stats EXIF debug parameters from the metadata callback into mExifParams
8136 *
8137 * PARAMETERS :
8138 * @metadata : metadata information from callback
8139 *
8140 * RETURN : none
8141 *
8142 *==========================================================================*/
8143void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8144{
8145 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8146 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8147 if (mExifParams.debug_params) {
8148 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8149 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8150 }
8151 }
8152 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8153 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8154 if (mExifParams.debug_params) {
8155 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8156 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8157 }
8158 }
8159 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8160 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8161 if (mExifParams.debug_params) {
8162 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8163 mExifParams.debug_params->af_debug_params_valid = TRUE;
8164 }
8165 }
8166 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8167 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8168 if (mExifParams.debug_params) {
8169 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8170 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8171 }
8172 }
8173 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8174 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8175 if (mExifParams.debug_params) {
8176 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8177 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8178 }
8179 }
8180 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8181 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8182 if (mExifParams.debug_params) {
8183 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8184 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8185 }
8186 }
8187 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8188 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8189 if (mExifParams.debug_params) {
8190 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8191 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8192 }
8193 }
8194 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8195 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8196 if (mExifParams.debug_params) {
8197 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8198 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8199 }
8200 }
8201}
8202
8203/*===========================================================================
8204 * FUNCTION : get3AExifParams
8205 *
8206 * DESCRIPTION: Return the cached EXIF parameters collected from 3A metadata callbacks
8207 *
8208 * PARAMETERS : none
8209 *
8210 *
8211 * RETURN : mm_jpeg_exif_params_t
8212 *
8213 *==========================================================================*/
8214mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8215{
8216 return mExifParams;
8217}
8218
8219/*===========================================================================
8220 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8221 *
8222 * DESCRIPTION: Translate urgent (partial-result) metadata from the backend format into framework result metadata
8223 *
8224 * PARAMETERS :
8225 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008226 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8227 * urgent metadata in a batch. Always true for
8228 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008229 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008230 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8231 * i.e. even though it doesn't map to a valid partial
8232 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008233 * RETURN : camera_metadata_t*
8234 * metadata in a format specified by fwk
8235 *==========================================================================*/
8236camera_metadata_t*
8237QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008238 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008239 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008240{
8241 CameraMetadata camMetadata;
8242 camera_metadata_t *resultMetadata;
8243
Shuzhen Wang485e2442017-08-02 12:21:08 -07008244 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008245 /* In batch mode, use empty metadata if this is not the last in batch
8246 */
8247 resultMetadata = allocate_camera_metadata(0, 0);
8248 return resultMetadata;
8249 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008250
8251 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8252 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8253 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8254 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8255 }
8256
8257 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8258 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8259 &aecTrigger->trigger, 1);
8260 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8261 &aecTrigger->trigger_id, 1);
8262 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8263 aecTrigger->trigger);
8264 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8265 aecTrigger->trigger_id);
8266 }
8267
8268 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8269 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8270 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8271 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8272 }
8273
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008274 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8275 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8276 if (NAME_NOT_FOUND != val) {
8277 uint8_t fwkAfMode = (uint8_t)val;
8278 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8279 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8280 } else {
8281 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8282 val);
8283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008284 }
8285
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008286 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8287 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8288 af_trigger->trigger);
8289 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8290 af_trigger->trigger_id);
8291
8292 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8293 mAfTrigger = *af_trigger;
8294 uint32_t fwk_AfState = (uint32_t) *afState;
8295
8296 // If this is the result for a new trigger, check whether there is a new
8297 // early af state. If there is, use the last af state for all results
8298 // preceding the current partial frame number.
8299 for (auto & pendingRequest : mPendingRequestsList) {
8300 if (pendingRequest.frame_number < frame_number) {
8301 pendingRequest.focusStateValid = true;
8302 pendingRequest.focusState = fwk_AfState;
8303 } else if (pendingRequest.frame_number == frame_number) {
8304 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8305 // Check if early AF state for trigger exists. If yes, send AF state as
8306 // partial result for better latency.
8307 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8308 pendingRequest.focusStateSent = true;
8309 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8310 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8311 frame_number, fwkEarlyAfState);
8312 }
8313 }
8314 }
8315 }
8316 }
8317 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8318 &mAfTrigger.trigger, 1);
8319 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8320
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008321 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8322 /*af regions*/
8323 int32_t afRegions[REGIONS_TUPLE_COUNT];
8324 // Adjust AF regions from the sensor output coordinate system to the
8325 // active array coordinate system.
8326 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8327 hAfRegions->rect.width, hAfRegions->rect.height);
8328
8329 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8330 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8331 REGIONS_TUPLE_COUNT);
8332 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8333 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8334 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8335 hAfRegions->rect.height);
8336 }
8337
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008338 // AF region confidence
8339 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8340 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8341 }
8342
Thierry Strudel3d639192016-09-09 11:52:26 -07008343 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8344 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8345 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8346 if (NAME_NOT_FOUND != val) {
8347 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8348 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8349 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8350 } else {
8351 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8352 }
8353 }
8354
8355 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8356 uint32_t aeMode = CAM_AE_MODE_MAX;
8357 int32_t flashMode = CAM_FLASH_MODE_MAX;
8358 int32_t redeye = -1;
8359 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8360 aeMode = *pAeMode;
8361 }
8362 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8363 flashMode = *pFlashMode;
8364 }
8365 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8366 redeye = *pRedeye;
8367 }
8368
8369 if (1 == redeye) {
8370 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8371 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8372 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8373 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8374 flashMode);
8375 if (NAME_NOT_FOUND != val) {
8376 fwk_aeMode = (uint8_t)val;
8377 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8378 } else {
8379 LOGE("Unsupported flash mode %d", flashMode);
8380 }
8381 } else if (aeMode == CAM_AE_MODE_ON) {
8382 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8383 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8384 } else if (aeMode == CAM_AE_MODE_OFF) {
8385 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8386 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008387 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8388 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8389 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008390 } else {
8391 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8392 "flashMode:%d, aeMode:%u!!!",
8393 redeye, flashMode, aeMode);
8394 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008395 if (mInstantAEC) {
8396 // Increment the frame index count until a bound is reached for instant AEC.
8397 mInstantAecFrameIdxCount++;
8398 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8399 CAM_INTF_META_AEC_INFO, metadata) {
8400 LOGH("ae_params->settled = %d",ae_params->settled);
8401 // If AEC has settled, or the number of frames has reached the bound,
8402 // reset instant AEC.
8403 if (ae_params->settled ||
8404 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8405 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8406 mInstantAEC = false;
8407 mResetInstantAEC = true;
8408 mInstantAecFrameIdxCount = 0;
8409 }
8410 }
8411 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008412 resultMetadata = camMetadata.release();
8413 return resultMetadata;
8414}
8415
8416/*===========================================================================
8417 * FUNCTION : dumpMetadataToFile
8418 *
8419 * DESCRIPTION: Dumps tuning metadata to file system
8420 *
8421 * PARAMETERS :
8422 * @meta : tuning metadata
8423 * @dumpFrameCount : current dump frame count
8424 * @enabled : Enable mask
8425 *
8426 *==========================================================================*/
8427void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8428 uint32_t &dumpFrameCount,
8429 bool enabled,
8430 const char *type,
8431 uint32_t frameNumber)
8432{
8433 //Some sanity checks
8434 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8435 LOGE("Tuning sensor data size bigger than expected %d: %d",
8436 meta.tuning_sensor_data_size,
8437 TUNING_SENSOR_DATA_MAX);
8438 return;
8439 }
8440
8441 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8442 LOGE("Tuning VFE data size bigger than expected %d: %d",
8443 meta.tuning_vfe_data_size,
8444 TUNING_VFE_DATA_MAX);
8445 return;
8446 }
8447
8448 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8449 LOGE("Tuning CPP data size bigger than expected %d: %d",
8450 meta.tuning_cpp_data_size,
8451 TUNING_CPP_DATA_MAX);
8452 return;
8453 }
8454
8455 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8456 LOGE("Tuning CAC data size bigger than expected %d: %d",
8457 meta.tuning_cac_data_size,
8458 TUNING_CAC_DATA_MAX);
8459 return;
8460 }
8461 //
8462
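    // The dump mirrors the QCAMERA3_TUNING_META_DATA_BLOB layout: six uint32_t
    // header fields followed by the sensor/VFE/CPP/CAC payloads. The file is
    // written under QCAMERA_DUMP_FRM_LOCATION with the name
    // "<YYYYMMDDHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin".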
8463 if(enabled){
8464 char timeBuf[FILENAME_MAX];
8465 char buf[FILENAME_MAX];
8466 memset(buf, 0, sizeof(buf));
8467 memset(timeBuf, 0, sizeof(timeBuf));
8468 time_t current_time;
8469 struct tm * timeinfo;
8470 time (&current_time);
8471 timeinfo = localtime (&current_time);
8472 if (timeinfo != NULL) {
8473 strftime (timeBuf, sizeof(timeBuf),
8474 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8475 }
8476 String8 filePath(timeBuf);
8477 snprintf(buf,
8478 sizeof(buf),
8479 "%dm_%s_%d.bin",
8480 dumpFrameCount,
8481 type,
8482 frameNumber);
8483 filePath.append(buf);
8484 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8485 if (file_fd >= 0) {
8486 ssize_t written_len = 0;
8487 meta.tuning_data_version = TUNING_DATA_VERSION;
8488 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8489 written_len += write(file_fd, data, sizeof(uint32_t));
8490 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8491 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8492 written_len += write(file_fd, data, sizeof(uint32_t));
8493 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8494 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8495 written_len += write(file_fd, data, sizeof(uint32_t));
8496 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8497 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8498 written_len += write(file_fd, data, sizeof(uint32_t));
8499 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8500 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8501 written_len += write(file_fd, data, sizeof(uint32_t));
8502 meta.tuning_mod3_data_size = 0;
8503 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8504 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8505 written_len += write(file_fd, data, sizeof(uint32_t));
8506 size_t total_size = meta.tuning_sensor_data_size;
8507 data = (void *)((uint8_t *)&meta.data);
8508 written_len += write(file_fd, data, total_size);
8509 total_size = meta.tuning_vfe_data_size;
8510 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8511 written_len += write(file_fd, data, total_size);
8512 total_size = meta.tuning_cpp_data_size;
8513 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8514 written_len += write(file_fd, data, total_size);
8515 total_size = meta.tuning_cac_data_size;
8516 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8517 written_len += write(file_fd, data, total_size);
8518 close(file_fd);
8519 }else {
8520 LOGE("fail to open file for metadata dumping");
8521 }
8522 }
8523}
8524
8525/*===========================================================================
8526 * FUNCTION : cleanAndSortStreamInfo
8527 *
8528 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8529 * and sort them such that raw stream is at the end of the list
8530 * This is a workaround for camera daemon constraint.
8531 *
8532 * PARAMETERS : None
8533 *
8534 *==========================================================================*/
8535void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8536{
8537 List<stream_info_t *> newStreamInfo;
8538
8539 /*clean up invalid streams*/
8540 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8541 it != mStreamInfo.end();) {
8542 if(((*it)->status) == INVALID){
8543 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8544 delete channel;
8545 free(*it);
8546 it = mStreamInfo.erase(it);
8547 } else {
8548 it++;
8549 }
8550 }
8551
8552 // Move preview/video/callback/snapshot streams into newList
8553 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8554 it != mStreamInfo.end();) {
8555 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8556 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8557 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8558 newStreamInfo.push_back(*it);
8559 it = mStreamInfo.erase(it);
8560 } else
8561 it++;
8562 }
8563 // Move raw streams into newList
8564 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8565 it != mStreamInfo.end();) {
8566 newStreamInfo.push_back(*it);
8567 it = mStreamInfo.erase(it);
8568 }
8569
8570 mStreamInfo = newStreamInfo;
8571}
8572
8573/*===========================================================================
8574 * FUNCTION : extractJpegMetadata
8575 *
8576 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8577 * JPEG metadata is cached in HAL, and return as part of capture
8578 * result when metadata is returned from camera daemon.
8579 *
8580 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8581 * @request: capture request
8582 *
8583 *==========================================================================*/
8584void QCamera3HardwareInterface::extractJpegMetadata(
8585 CameraMetadata& jpegMetadata,
8586 const camera3_capture_request_t *request)
8587{
8588 CameraMetadata frame_settings;
8589 frame_settings = request->settings;
8590
8591 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8592 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8593 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8594 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8595
8596 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8597 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8598 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8599 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8600
8601 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8602 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8603 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8604 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8605
8606 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8607 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8608 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8609 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8610
8611 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8612 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8613 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8614 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8615
8616 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8617 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8618 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8619 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8620
8621 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8622 int32_t thumbnail_size[2];
8623 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8624 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8625 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8626 int32_t orientation =
8627 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008628 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008629 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8630 int32_t temp;
8631 temp = thumbnail_size[0];
8632 thumbnail_size[0] = thumbnail_size[1];
8633 thumbnail_size[1] = temp;
8634 }
8635 }
8636 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8637 thumbnail_size,
8638 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8639 }
8640
8641}
8642
8643/*===========================================================================
8644 * FUNCTION : convertToRegions
8645 *
8646 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8647 *
8648 * PARAMETERS :
8649 * @rect : cam_rect_t struct to convert
8650 * @region : int32_t destination array
8651 * @weight : if we are converting from cam_area_t, weight is valid
8652 * else weight = -1
8653 *
8654 *==========================================================================*/
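// The destination array uses the framework region layout
// [x_min, y_min, x_max, y_max, weight] expected by ANDROID_CONTROL_AE/AF_REGIONS;
// the FACE_* constants double as the generic tuple offsets.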
8655void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8656 int32_t *region, int weight)
8657{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008658 region[FACE_LEFT] = rect.left;
8659 region[FACE_TOP] = rect.top;
8660 region[FACE_RIGHT] = rect.left + rect.width;
8661 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008662 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008663 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008664 }
8665}
8666
8667/*===========================================================================
8668 * FUNCTION : convertFromRegions
8669 *
8670 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8671 *
8672 * PARAMETERS :
8673 * @roi : cam_area_t destination to be filled
8674 * @frame_settings : capture request settings containing the region tag
8675 * @tag : metadata tag whose data is the 5-element array
8676 * [x_min, y_min, x_max, y_max, weight]
8677 *
8678 *==========================================================================*/
8679void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008680 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008681{
Thierry Strudel3d639192016-09-09 11:52:26 -07008682 int32_t x_min = frame_settings.find(tag).data.i32[0];
8683 int32_t y_min = frame_settings.find(tag).data.i32[1];
8684 int32_t x_max = frame_settings.find(tag).data.i32[2];
8685 int32_t y_max = frame_settings.find(tag).data.i32[3];
8686 roi.weight = frame_settings.find(tag).data.i32[4];
8687 roi.rect.left = x_min;
8688 roi.rect.top = y_min;
8689 roi.rect.width = x_max - x_min;
8690 roi.rect.height = y_max - y_min;
8691}
8692
8693/*===========================================================================
8694 * FUNCTION : resetIfNeededROI
8695 *
8696 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8697 * crop region
8698 *
8699 * PARAMETERS :
8700 * @roi : cam_area_t struct to resize
8701 * @scalerCropRegion : cam_crop_region_t region to compare against
8702 *
8703 *
8704 *==========================================================================*/
8705bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8706 const cam_crop_region_t* scalerCropRegion)
8707{
8708 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8709 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8710 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8711 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8712
8713 /* According to the spec, weight = 0 indicates the roi should be disabled.
8714 * Without this check, the validation below (whether the roi lies inside
8715 * the scaler crop region) would fail, the roi would not be reset, and the
8716 * algorithm would keep using a stale roi window.
8717 */
8718 if (roi->weight == 0) {
8719 return true;
8720 }
8721
8722 if ((roi_x_max < scalerCropRegion->left) ||
8723 // right edge of roi window is left of scalar crop's left edge
8724 (roi_y_max < scalerCropRegion->top) ||
8725 // bottom edge of roi window is above scalar crop's top edge
8726 (roi->rect.left > crop_x_max) ||
8727 // left edge of roi window is beyond(right) of scalar crop's right edge
8728 (roi->rect.top > crop_y_max)){
8729 // top edge of roi window is below scalar crop's bottom edge
8730 return false;
8731 }
8732 if (roi->rect.left < scalerCropRegion->left) {
8733 roi->rect.left = scalerCropRegion->left;
8734 }
8735 if (roi->rect.top < scalerCropRegion->top) {
8736 roi->rect.top = scalerCropRegion->top;
8737 }
8738 if (roi_x_max > crop_x_max) {
8739 roi_x_max = crop_x_max;
8740 }
8741 if (roi_y_max > crop_y_max) {
8742 roi_y_max = crop_y_max;
8743 }
8744 roi->rect.width = roi_x_max - roi->rect.left;
8745 roi->rect.height = roi_y_max - roi->rect.top;
8746 return true;
8747}
8748
8749/*===========================================================================
8750 * FUNCTION : convertLandmarks
8751 *
8752 * DESCRIPTION: helper method to extract the landmarks from face detection info
8753 *
8754 * PARAMETERS :
8755 * @landmark_data : input landmark data to be converted
8756 * @landmarks : int32_t destination array
8757 *
8758 *
8759 *==========================================================================*/
8760void QCamera3HardwareInterface::convertLandmarks(
8761 cam_face_landmarks_info_t landmark_data,
8762 int32_t *landmarks)
8763{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008764 if (landmark_data.is_left_eye_valid) {
8765 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8766 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8767 } else {
8768 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8769 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8770 }
8771
8772 if (landmark_data.is_right_eye_valid) {
8773 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8774 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8775 } else {
8776 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8777 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8778 }
8779
8780 if (landmark_data.is_mouth_valid) {
8781 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8782 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8783 } else {
8784 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8785 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8786 }
8787}
8788
8789/*===========================================================================
8790 * FUNCTION : setInvalidLandmarks
8791 *
8792 * DESCRIPTION: helper method to set invalid landmarks
8793 *
8794 * PARAMETERS :
8795 * @landmarks : int32_t destination array
8796 *
8797 *
8798 *==========================================================================*/
8799void QCamera3HardwareInterface::setInvalidLandmarks(
8800 int32_t *landmarks)
8801{
8802 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8803 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8804 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8805 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8806 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8807 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008808}
8809
8810#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008811
8812/*===========================================================================
8813 * FUNCTION : getCapabilities
8814 *
8815 * DESCRIPTION: query camera capability from back-end
8816 *
8817 * PARAMETERS :
8818 * @ops : mm-interface ops structure
8819 * @cam_handle : camera handle for which we need capability
8820 *
8821 * RETURN : ptr type of capability structure
8822 * capability for success
8823 * NULL for failure
8824 *==========================================================================*/
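// Illustrative usage, mirroring initCapabilities() below. The caller owns the
// returned buffer and must free() it when done:
//
//     cam_capability_t *caps = getCapabilities(cameraHandle->ops,
//             get_main_camera_handle(cameraHandle->camera_handle));
//     if (caps != NULL) {
//         /* ... use caps ... */
//         free(caps);
//     }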
8825cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8826 uint32_t cam_handle)
8827{
8828 int rc = NO_ERROR;
8829 QCamera3HeapMemory *capabilityHeap = NULL;
8830 cam_capability_t *cap_ptr = NULL;
8831
8832 if (ops == NULL) {
8833 LOGE("Invalid arguments");
8834 return NULL;
8835 }
8836
8837 capabilityHeap = new QCamera3HeapMemory(1);
8838 if (capabilityHeap == NULL) {
8839 LOGE("creation of capabilityHeap failed");
8840 return NULL;
8841 }
8842
8843 /* Allocate memory for capability buffer */
8844 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8845 if(rc != OK) {
8846 LOGE("No memory for cappability");
8847 goto allocate_failed;
8848 }
8849
8850 /* Map memory for capability buffer */
8851 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8852
8853 rc = ops->map_buf(cam_handle,
8854 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8855 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8856 if(rc < 0) {
8857 LOGE("failed to map capability buffer");
8858 rc = FAILED_TRANSACTION;
8859 goto map_failed;
8860 }
8861
8862 /* Query Capability */
8863 rc = ops->query_capability(cam_handle);
8864 if(rc < 0) {
8865 LOGE("failed to query capability");
8866 rc = FAILED_TRANSACTION;
8867 goto query_failed;
8868 }
8869
8870 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8871 if (cap_ptr == NULL) {
8872 LOGE("out of memory");
8873 rc = NO_MEMORY;
8874 goto query_failed;
8875 }
8876
8877 memset(cap_ptr, 0, sizeof(cam_capability_t));
8878 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8879
8880 int index;
8881 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8882 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8883 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8884 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8885 }
8886
8887query_failed:
8888 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8889map_failed:
8890 capabilityHeap->deallocate();
8891allocate_failed:
8892 delete capabilityHeap;
8893
8894 if (rc != NO_ERROR) {
8895 return NULL;
8896 } else {
8897 return cap_ptr;
8898 }
8899}
8900
Thierry Strudel3d639192016-09-09 11:52:26 -07008901/*===========================================================================
8902 * FUNCTION : initCapabilities
8903 *
8904 * DESCRIPTION: initialize camera capabilities in static data struct
8905 *
8906 * PARAMETERS :
8907 * @cameraId : camera Id
8908 *
8909 * RETURN : int32_t type of status
8910 * NO_ERROR -- success
8911 * non-zero failure code
8912 *==========================================================================*/
8913int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8914{
8915 int rc = 0;
8916 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008917 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008918
8919 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8920 if (rc) {
8921 LOGE("camera_open failed. rc = %d", rc);
8922 goto open_failed;
8923 }
8924 if (!cameraHandle) {
8925 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8926 rc = FAILED_TRANSACTION;
 goto open_failed;
8927 }
8928
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008929 handle = get_main_camera_handle(cameraHandle->camera_handle);
8930 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8931 if (gCamCapability[cameraId] == NULL) {
8932 rc = FAILED_TRANSACTION;
8933 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008934 }
8935
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008936 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008937 if (is_dual_camera_by_idx(cameraId)) {
8938 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8939 gCamCapability[cameraId]->aux_cam_cap =
8940 getCapabilities(cameraHandle->ops, handle);
8941 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8942 rc = FAILED_TRANSACTION;
8943 free(gCamCapability[cameraId]);
8944 goto failed_op;
8945 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008946
8947 // Copy the main camera capability to main_cam_cap struct
8948 gCamCapability[cameraId]->main_cam_cap =
8949 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8950 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8951 LOGE("out of memory");
8952 rc = NO_MEMORY;
8953 goto failed_op;
8954 }
8955 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8956 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008957 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008958failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008959 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8960 cameraHandle = NULL;
8961open_failed:
8962 return rc;
8963}
8964
8965/*==========================================================================
8966 * FUNCTION : get3Aversion
8967 *
8968 * DESCRIPTION: get the Q3A S/W version
8969 *
8970 * PARAMETERS :
8971 * @sw_version: Reference to the Q3A structure which will hold version info upon
8972 * return
8973 *
8974 * RETURN : None
8975 *
8976 *==========================================================================*/
8977void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8978{
8979 if(gCamCapability[mCameraId])
8980 sw_version = gCamCapability[mCameraId]->q3a_version;
8981 else
8982 LOGE("Capability structure NULL!");
8983}
8984
8985
8986/*===========================================================================
8987 * FUNCTION : initParameters
8988 *
8989 * DESCRIPTION: initialize camera parameters
8990 *
8991 * PARAMETERS :
8992 *
8993 * RETURN : int32_t type of status
8994 * NO_ERROR -- success
8995 * non-zero failure code
8996 *==========================================================================*/
8997int QCamera3HardwareInterface::initParameters()
8998{
8999 int rc = 0;
9000
9001 //Allocate Set Param Buffer
9002 mParamHeap = new QCamera3HeapMemory(1);
9003 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9004 if(rc != OK) {
9005 rc = NO_MEMORY;
9006 LOGE("Failed to allocate SETPARM Heap memory");
9007 delete mParamHeap;
9008 mParamHeap = NULL;
9009 return rc;
9010 }
9011
9012 //Map memory for parameters buffer
9013 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9014 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9015 mParamHeap->getFd(0),
9016 sizeof(metadata_buffer_t),
9017 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9018 if(rc < 0) {
9019 LOGE("failed to map SETPARM buffer");
9020 rc = FAILED_TRANSACTION;
9021 mParamHeap->deallocate();
9022 delete mParamHeap;
9023 mParamHeap = NULL;
9024 return rc;
9025 }
9026
9027 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9028
9029 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9030 return rc;
9031}
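/* For illustration, the parameter heap follows an allocate -> map -> use -> unmap ->
 * deallocate lifecycle; initParameters() performs the first two steps and
 * deinitParameters() below undoes them in reverse order (calls abbreviated):
 *
 *   mParamHeap->allocate(sizeof(metadata_buffer_t));
 *   ops->map_buf(CAM_MAPPING_BUF_TYPE_PARM_BUF, fd, size, ptr);
 *   ...
 *   ops->unmap_buf(CAM_MAPPING_BUF_TYPE_PARM_BUF);
 *   mParamHeap->deallocate();
 */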
9032
9033/*===========================================================================
9034 * FUNCTION : deinitParameters
9035 *
9036 * DESCRIPTION: de-initialize camera parameters
9037 *
9038 * PARAMETERS :
9039 *
9040 * RETURN : NONE
9041 *==========================================================================*/
9042void QCamera3HardwareInterface::deinitParameters()
9043{
9044 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9045 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9046
9047 mParamHeap->deallocate();
9048 delete mParamHeap;
9049 mParamHeap = NULL;
9050
9051 mParameters = NULL;
9052
9053 free(mPrevParameters);
9054 mPrevParameters = NULL;
9055}
9056
9057/*===========================================================================
9058 * FUNCTION : calcMaxJpegSize
9059 *
9060 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9061 *
9062 * PARAMETERS :
9063 * @camera_id : camera Id
9064 * RETURN : max_jpeg_size
9065 *==========================================================================*/
9066size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9067{
9068 size_t max_jpeg_size = 0;
9069 size_t temp_width, temp_height;
9070 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9071 MAX_SIZES_CNT);
9072 for (size_t i = 0; i < count; i++) {
9073 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9074 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9075 if (temp_width * temp_height > max_jpeg_size ) {
9076 max_jpeg_size = temp_width * temp_height;
9077 }
9078 }
9079 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9080 return max_jpeg_size;
9081}
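/* For illustration, the bound above is 1.5 bytes per pixel of the largest picture size
 * plus the trailing camera3_jpeg_blob_t transport header; the 3/2 factor is a
 * conservative worst-case compression assumption, not a value mandated by camera3.
 * For a hypothetical 4000x3000 maximum picture size:
 *
 *   size_t maxJpeg = (size_t)4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t); // ~18 MB
 */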
9082
9083/*===========================================================================
9084 * FUNCTION : getMaxRawSize
9085 *
9086 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9087 *
9088 * PARAMETERS :
9089 * @camera_id : camera Id
9090 * RETURN : Largest supported Raw Dimension
9091 *==========================================================================*/
9092cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9093{
9094 int max_width = 0;
9095 cam_dimension_t maxRawSize;
9096
9097 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9098 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9099 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9100 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9101 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9102 }
9103 }
9104 return maxRawSize;
9105}
9106
9107
9108/*===========================================================================
9109 * FUNCTION : calcMaxJpegDim
9110 *
9111 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9112 *
9113 * PARAMETERS :
9114 *
9115 * RETURN : max_jpeg_dim
9116 *==========================================================================*/
9117cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9118{
9119 cam_dimension_t max_jpeg_dim;
9120 cam_dimension_t curr_jpeg_dim;
9121 max_jpeg_dim.width = 0;
9122 max_jpeg_dim.height = 0;
9123 curr_jpeg_dim.width = 0;
9124 curr_jpeg_dim.height = 0;
9125 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9126 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9127 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9128 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9129 max_jpeg_dim.width * max_jpeg_dim.height ) {
9130 max_jpeg_dim.width = curr_jpeg_dim.width;
9131 max_jpeg_dim.height = curr_jpeg_dim.height;
9132 }
9133 }
9134 return max_jpeg_dim;
9135}
9136
9137/*===========================================================================
9138 * FUNCTION : addStreamConfig
9139 *
9140 * DESCRIPTION: adds the stream configuration to the array
9141 *
9142 * PARAMETERS :
9143 * @available_stream_configs : pointer to stream configuration array
9144 * @scalar_format : scalar format
9145 * @dim : configuration dimension
9146 * @config_type : input or output configuration type
9147 *
9148 * RETURN : NONE
9149 *==========================================================================*/
9150void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9151 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9152{
9153 available_stream_configs.add(scalar_format);
9154 available_stream_configs.add(dim.width);
9155 available_stream_configs.add(dim.height);
9156 available_stream_configs.add(config_type);
9157}
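/* For illustration, each call appends one flattened (format, width, height, direction)
 * quadruple, which is how ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS is encoded.
 * Advertising a hypothetical 1920x1080 YUV output would append:
 *
 *   { HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
 *     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }
 */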
9158
9159/*===========================================================================
9160 * FUNCTION : supportBurstCapture
9161 *
9162 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9163 *
9164 * PARAMETERS :
9165 * @cameraId : camera Id
9166 *
9167 * RETURN : true if camera supports BURST_CAPTURE
9168 * false otherwise
9169 *==========================================================================*/
9170bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9171{
9172 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9173 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9174 const int32_t highResWidth = 3264;
9175 const int32_t highResHeight = 2448;
9176
9177 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9178 // Maximum resolution images cannot be captured at >= 10fps
9179 // -> not supporting BURST_CAPTURE
9180 return false;
9181 }
9182
9183 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9184 // Maximum resolution images can be captured at >= 20fps
9185 // --> supporting BURST_CAPTURE
9186 return true;
9187 }
9188
9189 // Find the smallest highRes resolution, or largest resolution if there is none
9190 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9191 MAX_SIZES_CNT);
9192 size_t highRes = 0;
9193 while ((highRes + 1 < totalCnt) &&
9194 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9195 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9196 highResWidth * highResHeight)) {
9197 highRes++;
9198 }
9199 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9200 return true;
9201 } else {
9202 return false;
9203 }
9204}
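/* For illustration, the thresholds above express the BURST_CAPTURE requirement in frame
 * durations. For a hypothetical sensor whose full-resolution minimum frame duration is
 * 40'000'000 ns (25 fps), 40 ms <= highResDurationBound (50 ms), so burst capture is
 * advertised. A 120 ms duration (> fullResDurationBound) would rule it out entirely,
 * and anything in between falls back to checking the smallest picture size that is
 * still >= 3264x2448 (~8 MP).
 */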
9205
9206/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009207 * FUNCTION : getPDStatIndex
9208 *
9209 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9210 *
9211 * PARAMETERS :
9212 * @caps : camera capabilities
9213 *
9214 * RETURN : int32_t type
9215 * non-negative - on success
9216 * -1 - on failure
9217 *==========================================================================*/
9218int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9219 if (nullptr == caps) {
9220 return -1;
9221 }
9222
9223 uint32_t metaRawCount = caps->meta_raw_channel_count;
9224 int32_t ret = -1;
9225 for (size_t i = 0; i < metaRawCount; i++) {
9226 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9227 ret = i;
9228 break;
9229 }
9230 }
9231
9232 return ret;
9233}
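/* For illustration, callers treat the returned value as an index into raw_meta_dim[] /
 * sub_fmt[], e.g.:
 *
 *   int32_t idx = getPDStatIndex(gCamCapability[cameraId]);
 *   if (idx >= 0) {
 *       cam_dimension_t pdDim = gCamCapability[cameraId]->raw_meta_dim[idx];
 *       // advertise PDAF depth configurations based on pdDim
 *   }
 */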
9234
9235/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009236 * FUNCTION : initStaticMetadata
9237 *
9238 * DESCRIPTION: initialize the static metadata
9239 *
9240 * PARAMETERS :
9241 * @cameraId : camera Id
9242 *
9243 * RETURN : int32_t type of status
9244 * 0 -- success
9245 * non-zero failure code
9246 *==========================================================================*/
9247int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9248{
9249 int rc = 0;
9250 CameraMetadata staticInfo;
9251 size_t count = 0;
9252 bool limitedDevice = false;
9253 char prop[PROPERTY_VALUE_MAX];
9254 bool supportBurst = false;
9255
9256 supportBurst = supportBurstCapture(cameraId);
9257
9258 /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
9259 * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9260 * advertised as a limited device */
9261 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9262 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9263 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9264 !supportBurst;
9265
9266 uint8_t supportedHwLvl = limitedDevice ?
9267 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009268#ifndef USE_HAL_3_3
9269 // LEVEL_3 - This device will support level 3.
9270 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9271#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009272 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009273#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009274
9275 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9276 &supportedHwLvl, 1);
9277
9278 bool facingBack = false;
9279 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9280 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9281 facingBack = true;
9282 }
9283 /*HAL 3 only*/
9284 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9285 &gCamCapability[cameraId]->min_focus_distance, 1);
9286
9287 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9288 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9289
9290 /*should be using focal lengths but sensor doesn't provide that info now*/
9291 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9292 &gCamCapability[cameraId]->focal_length,
9293 1);
9294
9295 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9296 gCamCapability[cameraId]->apertures,
9297 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9298
9299 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9300 gCamCapability[cameraId]->filter_densities,
9301 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9302
9303
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009304 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9305 size_t mode_count =
9306 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9307 for (size_t i = 0; i < mode_count; i++) {
9308 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009310 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009311 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009312
9313 int32_t lens_shading_map_size[] = {
9314 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9315 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9316 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9317 lens_shading_map_size,
9318 sizeof(lens_shading_map_size)/sizeof(int32_t));
9319
9320 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9321 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9322
9323 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9324 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9325
9326 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9327 &gCamCapability[cameraId]->max_frame_duration, 1);
9328
9329 camera_metadata_rational baseGainFactor = {
9330 gCamCapability[cameraId]->base_gain_factor.numerator,
9331 gCamCapability[cameraId]->base_gain_factor.denominator};
9332 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9333 &baseGainFactor, 1);
9334
9335 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9336 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9337
9338 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9339 gCamCapability[cameraId]->pixel_array_size.height};
9340 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9341 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9342
9343 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9344 gCamCapability[cameraId]->active_array_size.top,
9345 gCamCapability[cameraId]->active_array_size.width,
9346 gCamCapability[cameraId]->active_array_size.height};
9347 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9348 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9349
9350 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9351 &gCamCapability[cameraId]->white_level, 1);
9352
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009353 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9354 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9355 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009356 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009357 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009358
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009359#ifndef USE_HAL_3_3
9360 bool hasBlackRegions = false;
9361 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9362 LOGW("black_region_count: %d is bounded to %d",
9363 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9364 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9365 }
9366 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9367 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9368 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9369 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9370 }
9371 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9372 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9373 hasBlackRegions = true;
9374 }
9375#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009376 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9377 &gCamCapability[cameraId]->flash_charge_duration, 1);
9378
9379 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9380 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9381
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009382 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9383 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9384 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009385 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9386 &timestampSource, 1);
9387
Thierry Strudel54dc9782017-02-15 12:12:10 -08009388 //update histogram vendor data
9389 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009390 &gCamCapability[cameraId]->histogram_size, 1);
9391
Thierry Strudel54dc9782017-02-15 12:12:10 -08009392 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009393 &gCamCapability[cameraId]->max_histogram_count, 1);
9394
Shuzhen Wang14415f52016-11-16 18:26:18 -08009395 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9396 //so that the app can request fewer bins than the maximum supported.
9397 std::vector<int32_t> histBins;
9398 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9399 histBins.push_back(maxHistBins);
9400 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9401 (maxHistBins & 0x1) == 0) {
9402 histBins.push_back(maxHistBins >> 1);
9403 maxHistBins >>= 1;
9404 }
9405 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9406 histBins.data(), histBins.size());
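    // For illustration, with a hypothetical max_histogram_count of 256 and a
    // MIN_CAM_HISTOGRAM_STATS_SIZE of 32, the loop above advertises {256, 128, 64, 32};
    // halving stops once the next value would drop below the minimum or the current
    // value becomes odd.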
9407
Thierry Strudel3d639192016-09-09 11:52:26 -07009408 int32_t sharpness_map_size[] = {
9409 gCamCapability[cameraId]->sharpness_map_size.width,
9410 gCamCapability[cameraId]->sharpness_map_size.height};
9411
9412 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9413 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9414
9415 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9416 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9417
Emilian Peev0f3c3162017-03-15 12:57:46 +00009418 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9419 if (0 <= indexPD) {
9420 // Advertise PD stats data as part of the Depth capabilities
9421 int32_t depthWidth =
9422 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9423 int32_t depthHeight =
9424 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009425 int32_t depthStride =
9426 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009427 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9428 assert(0 < depthSamplesCount);
9429 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9430 &depthSamplesCount, 1);
9431
9432 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9433 depthHeight,
9434 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9435 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9436 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9437 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9438 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9439
9440 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9441 depthHeight, 33333333,
9442 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9443 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9444 depthMinDuration,
9445 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9446
9447 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9448 depthHeight, 0,
9449 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9450 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9451 depthStallDuration,
9452 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9453
9454 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9455 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009456
9457 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9458 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9459 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009460 }
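    /* For illustration, the values above are pure arithmetic on the PD stats buffer
     * dimensions. For a hypothetical 496x1320 buffer:
     *
     *   depthStride       = 496 * 2               =   992   // 2 bytes per sample
     *   depthSamplesCount = (496 * 1320 * 2) / 16 = 81840   // samples in the BLOB config
     *
     * The same dimensions are also published through the vendor tag
     * NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS as {width, height, stride}.
     */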
9461
Thierry Strudel3d639192016-09-09 11:52:26 -07009462 int32_t scalar_formats[] = {
9463 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9464 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9465 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9466 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9467 HAL_PIXEL_FORMAT_RAW10,
9468 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009469 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9470 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9471 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009472
9473 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9474 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9475 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9476 count, MAX_SIZES_CNT, available_processed_sizes);
9477 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9478 available_processed_sizes, count * 2);
9479
9480 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9481 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9482 makeTable(gCamCapability[cameraId]->raw_dim,
9483 count, MAX_SIZES_CNT, available_raw_sizes);
9484 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9485 available_raw_sizes, count * 2);
9486
9487 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9488 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9489 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9490 count, MAX_SIZES_CNT, available_fps_ranges);
9491 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9492 available_fps_ranges, count * 2);
9493
9494 camera_metadata_rational exposureCompensationStep = {
9495 gCamCapability[cameraId]->exp_compensation_step.numerator,
9496 gCamCapability[cameraId]->exp_compensation_step.denominator};
9497 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9498 &exposureCompensationStep, 1);
9499
9500 Vector<uint8_t> availableVstabModes;
9501 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9502 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009503 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009504 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009505 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009506 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009507 count = IS_TYPE_MAX;
9508 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9509 for (size_t i = 0; i < count; i++) {
9510 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9511 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9512 eisSupported = true;
9513 break;
9514 }
9515 }
9516 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009517 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9518 }
9519 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9520 availableVstabModes.array(), availableVstabModes.size());
9521
9522 /*HAL 1 and HAL 3 common*/
9523 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9524 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9525 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009526 // Cap the max zoom to the max preferred value
9527 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009528 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9529 &maxZoom, 1);
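    // For illustration, zoom_ratio_tbl holds per-step ratios scaled by 100, so a
    // hypothetical table ending in 600 yields maxZoomStep / minZoomStep = 600 / 100 = 6,
    // which is then capped at MAX_PREFERRED_ZOOM_RATIO before being published as the
    // maximum digital zoom.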
9530
9531 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9532 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9533
9534 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9535 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9536 max3aRegions[2] = 0; /* AF not supported */
9537 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9538 max3aRegions, 3);
9539
9540 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9541 memset(prop, 0, sizeof(prop));
9542 property_get("persist.camera.facedetect", prop, "1");
9543 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9544 LOGD("Support face detection mode: %d",
9545 supportedFaceDetectMode);
9546
9547 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009548 /* support mode should be OFF if the max number of faces is 0 */
9549 if (maxFaces <= 0) {
9550 supportedFaceDetectMode = 0;
9551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009552 Vector<uint8_t> availableFaceDetectModes;
9553 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9554 if (supportedFaceDetectMode == 1) {
9555 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9556 } else if (supportedFaceDetectMode == 2) {
9557 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9558 } else if (supportedFaceDetectMode == 3) {
9559 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9560 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9561 } else {
9562 maxFaces = 0;
9563 }
9564 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9565 availableFaceDetectModes.array(),
9566 availableFaceDetectModes.size());
9567 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9568 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009569 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9570 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9571 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009572
9573 int32_t exposureCompensationRange[] = {
9574 gCamCapability[cameraId]->exposure_compensation_min,
9575 gCamCapability[cameraId]->exposure_compensation_max};
9576 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9577 exposureCompensationRange,
9578 sizeof(exposureCompensationRange)/sizeof(int32_t));
9579
9580 uint8_t lensFacing = (facingBack) ?
9581 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9582 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9583
9584 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9585 available_thumbnail_sizes,
9586 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9587
9588 /*all sizes will be clubbed into this tag*/
9589 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9590 /*android.scaler.availableStreamConfigurations*/
9591 Vector<int32_t> available_stream_configs;
9592 cam_dimension_t active_array_dim;
9593 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9594 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009595
9596 /* Advertise the list of supported input dimensions based on the property below.
9597 By default all sizes up to 5MP will be advertised.
9598 Note that the setprop resolution format should be WxH,
9599 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9600 To list all supported sizes, the setprop needs to be set to "0x0" */
9601 cam_dimension_t minInputSize = {2592,1944}; //5MP
9602 memset(prop, 0, sizeof(prop));
9603 property_get("persist.camera.input.minsize", prop, "2592x1944");
9604 if (strlen(prop) > 0) {
9605 char *saveptr = NULL;
9606 char *token = strtok_r(prop, "x", &saveptr);
9607 if (token != NULL) {
9608 minInputSize.width = atoi(token);
9609 }
9610 token = strtok_r(NULL, "x", &saveptr);
9611 if (token != NULL) {
9612 minInputSize.height = atoi(token);
9613 }
9614 }
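    // For illustration, the property is parsed as "<width>x<height>": setting it to
    // "1280x720" yields minInputSize = {1280, 720}, while a malformed value such as
    // "1280" leaves the height at its 1944 default because the second strtok_r()
    // call returns NULL.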
9615
Thierry Strudel3d639192016-09-09 11:52:26 -07009616 /* Add input/output stream configurations for each scalar formats*/
9617 for (size_t j = 0; j < scalar_formats_count; j++) {
9618 switch (scalar_formats[j]) {
9619 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9620 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9621 case HAL_PIXEL_FORMAT_RAW10:
9622 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9623 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9624 addStreamConfig(available_stream_configs, scalar_formats[j],
9625 gCamCapability[cameraId]->raw_dim[i],
9626 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9627 }
9628 break;
9629 case HAL_PIXEL_FORMAT_BLOB:
9630 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9631 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9632 addStreamConfig(available_stream_configs, scalar_formats[j],
9633 gCamCapability[cameraId]->picture_sizes_tbl[i],
9634 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9635 }
9636 break;
9637 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9638 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9639 default:
9640 cam_dimension_t largest_picture_size;
9641 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9642 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9643 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9644 addStreamConfig(available_stream_configs, scalar_formats[j],
9645 gCamCapability[cameraId]->picture_sizes_tbl[i],
9646 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009647 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009648 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9649 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009650 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9651 >= minInputSize.width) || (gCamCapability[cameraId]->
9652 picture_sizes_tbl[i].height >= minInputSize.height)) {
9653 addStreamConfig(available_stream_configs, scalar_formats[j],
9654 gCamCapability[cameraId]->picture_sizes_tbl[i],
9655 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9656 }
9657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009658 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009659
Thierry Strudel3d639192016-09-09 11:52:26 -07009660 break;
9661 }
9662 }
9663
9664 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9665 available_stream_configs.array(), available_stream_configs.size());
9666 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9667 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9668
9669 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9670 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9671
9672 /* android.scaler.availableMinFrameDurations */
9673 Vector<int64_t> available_min_durations;
9674 for (size_t j = 0; j < scalar_formats_count; j++) {
9675 switch (scalar_formats[j]) {
9676 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9677 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9678 case HAL_PIXEL_FORMAT_RAW10:
9679 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9680 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9681 available_min_durations.add(scalar_formats[j]);
9682 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9683 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9684 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9685 }
9686 break;
9687 default:
9688 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9689 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9690 available_min_durations.add(scalar_formats[j]);
9691 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9692 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9693 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9694 }
9695 break;
9696 }
9697 }
9698 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9699 available_min_durations.array(), available_min_durations.size());
9700
9701 Vector<int32_t> available_hfr_configs;
9702 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9703 int32_t fps = 0;
9704 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9705 case CAM_HFR_MODE_60FPS:
9706 fps = 60;
9707 break;
9708 case CAM_HFR_MODE_90FPS:
9709 fps = 90;
9710 break;
9711 case CAM_HFR_MODE_120FPS:
9712 fps = 120;
9713 break;
9714 case CAM_HFR_MODE_150FPS:
9715 fps = 150;
9716 break;
9717 case CAM_HFR_MODE_180FPS:
9718 fps = 180;
9719 break;
9720 case CAM_HFR_MODE_210FPS:
9721 fps = 210;
9722 break;
9723 case CAM_HFR_MODE_240FPS:
9724 fps = 240;
9725 break;
9726 case CAM_HFR_MODE_480FPS:
9727 fps = 480;
9728 break;
9729 case CAM_HFR_MODE_OFF:
9730 case CAM_HFR_MODE_MAX:
9731 default:
9732 break;
9733 }
9734
9735 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9736 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9737 /* For each HFR frame rate, need to advertise one variable fps range
9738 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9739 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9740 * set by the app. When video recording is started, [120, 120] is
9741 * set. This way sensor configuration does not change when recording
9742 * is started */
9743
9744 /* (width, height, fps_min, fps_max, batch_size_max) */
9745 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9746 j < MAX_SIZES_CNT; j++) {
9747 available_hfr_configs.add(
9748 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9749 available_hfr_configs.add(
9750 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9751 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9752 available_hfr_configs.add(fps);
9753 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9754
9755 /* (width, height, fps_min, fps_max, batch_size_max) */
9756 available_hfr_configs.add(
9757 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9758 available_hfr_configs.add(
9759 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9760 available_hfr_configs.add(fps);
9761 available_hfr_configs.add(fps);
9762 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9763 }
9764 }
9765 }
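    /* For illustration, a hypothetical 1920x1080 entry in the 120 fps HFR table produces
     * two HIGH_SPEED_VIDEO_CONFIGURATIONS tuples (assuming PREVIEW_FPS_FOR_HFR is 30):
     *
     *   {1920, 1080,  30, 120, 4}   // preview running alone: variable fps range
     *   {1920, 1080, 120, 120, 4}   // recording: fixed fps range, batch size 120/30
     */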
9766 //Advertise HFR capability only if the property is set
9767 memset(prop, 0, sizeof(prop));
9768 property_get("persist.camera.hal3hfr.enable", prop, "1");
9769 uint8_t hfrEnable = (uint8_t)atoi(prop);
9770
9771 if(hfrEnable && available_hfr_configs.array()) {
9772 staticInfo.update(
9773 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9774 available_hfr_configs.array(), available_hfr_configs.size());
9775 }
9776
9777 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9778 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9779 &max_jpeg_size, 1);
9780
9781 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9782 size_t size = 0;
9783 count = CAM_EFFECT_MODE_MAX;
9784 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9785 for (size_t i = 0; i < count; i++) {
9786 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9787 gCamCapability[cameraId]->supported_effects[i]);
9788 if (NAME_NOT_FOUND != val) {
9789 avail_effects[size] = (uint8_t)val;
9790 size++;
9791 }
9792 }
9793 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9794 avail_effects,
9795 size);
9796
9797 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9798 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9799 size_t supported_scene_modes_cnt = 0;
9800 count = CAM_SCENE_MODE_MAX;
9801 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9802 for (size_t i = 0; i < count; i++) {
9803 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9804 CAM_SCENE_MODE_OFF) {
9805 int val = lookupFwkName(SCENE_MODES_MAP,
9806 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9807 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009808
Thierry Strudel3d639192016-09-09 11:52:26 -07009809 if (NAME_NOT_FOUND != val) {
9810 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9811 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9812 supported_scene_modes_cnt++;
9813 }
9814 }
9815 }
9816 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9817 avail_scene_modes,
9818 supported_scene_modes_cnt);
9819
9820 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9821 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9822 supported_scene_modes_cnt,
9823 CAM_SCENE_MODE_MAX,
9824 scene_mode_overrides,
9825 supported_indexes,
9826 cameraId);
9827
9828 if (supported_scene_modes_cnt == 0) {
9829 supported_scene_modes_cnt = 1;
9830 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9831 }
9832
9833 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9834 scene_mode_overrides, supported_scene_modes_cnt * 3);
9835
9836 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9837 ANDROID_CONTROL_MODE_AUTO,
9838 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9839 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9840 available_control_modes,
9841 3);
9842
9843 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9844 size = 0;
9845 count = CAM_ANTIBANDING_MODE_MAX;
9846 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9847 for (size_t i = 0; i < count; i++) {
9848 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9849 gCamCapability[cameraId]->supported_antibandings[i]);
9850 if (NAME_NOT_FOUND != val) {
9851 avail_antibanding_modes[size] = (uint8_t)val;
9852 size++;
9853 }
9854
9855 }
9856 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9857 avail_antibanding_modes,
9858 size);
9859
9860 uint8_t avail_abberation_modes[] = {
9861 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9862 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9863 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9864 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9865 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9866 if (0 == count) {
9867 // If no aberration correction modes are available for a device, advertise only the OFF mode
9868 size = 1;
9869 } else {
9870 // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
9871 // So, advertise all 3 modes if at least one mode is supported, as per the
9872 // new M requirement
9873 size = 3;
9874 }
9875 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9876 avail_abberation_modes,
9877 size);
9878
9879 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9880 size = 0;
9881 count = CAM_FOCUS_MODE_MAX;
9882 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9883 for (size_t i = 0; i < count; i++) {
9884 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9885 gCamCapability[cameraId]->supported_focus_modes[i]);
9886 if (NAME_NOT_FOUND != val) {
9887 avail_af_modes[size] = (uint8_t)val;
9888 size++;
9889 }
9890 }
9891 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9892 avail_af_modes,
9893 size);
9894
9895 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9896 size = 0;
9897 count = CAM_WB_MODE_MAX;
9898 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9899 for (size_t i = 0; i < count; i++) {
9900 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9901 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9902 gCamCapability[cameraId]->supported_white_balances[i]);
9903 if (NAME_NOT_FOUND != val) {
9904 avail_awb_modes[size] = (uint8_t)val;
9905 size++;
9906 }
9907 }
9908 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9909 avail_awb_modes,
9910 size);
9911
9912 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9913 count = CAM_FLASH_FIRING_LEVEL_MAX;
9914 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9915 count);
9916 for (size_t i = 0; i < count; i++) {
9917 available_flash_levels[i] =
9918 gCamCapability[cameraId]->supported_firing_levels[i];
9919 }
9920 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9921 available_flash_levels, count);
9922
9923 uint8_t flashAvailable;
9924 if (gCamCapability[cameraId]->flash_available)
9925 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9926 else
9927 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9928 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9929 &flashAvailable, 1);
9930
9931 Vector<uint8_t> avail_ae_modes;
9932 count = CAM_AE_MODE_MAX;
9933 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9934 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009935 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9936 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9937 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9938 }
9939 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009940 }
9941 if (flashAvailable) {
9942 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9943 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9944 }
9945 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9946 avail_ae_modes.array(),
9947 avail_ae_modes.size());
9948
9949 int32_t sensitivity_range[2];
9950 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9951 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9952 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9953 sensitivity_range,
9954 sizeof(sensitivity_range) / sizeof(int32_t));
9955
9956 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9957 &gCamCapability[cameraId]->max_analog_sensitivity,
9958 1);
9959
9960 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9961 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9962 &sensor_orientation,
9963 1);
9964
9965 int32_t max_output_streams[] = {
9966 MAX_STALLING_STREAMS,
9967 MAX_PROCESSED_STREAMS,
9968 MAX_RAW_STREAMS};
9969 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9970 max_output_streams,
9971 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9972
9973 uint8_t avail_leds = 0;
9974 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9975 &avail_leds, 0);
9976
9977 uint8_t focus_dist_calibrated;
9978 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9979 gCamCapability[cameraId]->focus_dist_calibrated);
9980 if (NAME_NOT_FOUND != val) {
9981 focus_dist_calibrated = (uint8_t)val;
9982 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9983 &focus_dist_calibrated, 1);
9984 }
9985
9986 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9987 size = 0;
9988 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9989 MAX_TEST_PATTERN_CNT);
9990 for (size_t i = 0; i < count; i++) {
9991 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9992 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9993 if (NAME_NOT_FOUND != testpatternMode) {
9994 avail_testpattern_modes[size] = testpatternMode;
9995 size++;
9996 }
9997 }
9998 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9999 avail_testpattern_modes,
10000 size);
10001
10002 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10003 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10004 &max_pipeline_depth,
10005 1);
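    // For illustration, with EMPTY_PIPELINE_DELAY = 2 and FRAME_SKIP_DELAY = 0 (defined at
    // the top of this file) and a hypothetical MAX_INFLIGHT_REQUESTS of 6, the advertised
    // maximum pipeline depth would be 6 + 2 + 0 = 8.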
10006
10007 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10008 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10009 &partial_result_count,
10010 1);
10011
10012 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10013 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10014
10015 Vector<uint8_t> available_capabilities;
10016 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10017 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10018 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10019 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10020 if (supportBurst) {
10021 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10022 }
10023 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10025 if (hfrEnable && available_hfr_configs.array()) {
10026 available_capabilities.add(
10027 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10028 }
10029
10030 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10031 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10032 }
10033 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10034 available_capabilities.array(),
10035 available_capabilities.size());
10036
10037 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10038 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10039 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10040 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10041
10042 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10043 &aeLockAvailable, 1);
10044
10045 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10046 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10047 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10048 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10049
10050 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10051 &awbLockAvailable, 1);
10052
10053 int32_t max_input_streams = 1;
10054 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10055 &max_input_streams,
10056 1);
10057
10058 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10059 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10060 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10061 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10062 HAL_PIXEL_FORMAT_YCbCr_420_888};
10063 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10064 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10065
10066 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10067 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10068 &max_latency,
10069 1);
10070
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010071#ifndef USE_HAL_3_3
10072 int32_t isp_sensitivity_range[2];
10073 isp_sensitivity_range[0] =
10074 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10075 isp_sensitivity_range[1] =
10076 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10077 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10078 isp_sensitivity_range,
10079 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10080#endif
10081
Thierry Strudel3d639192016-09-09 11:52:26 -070010082 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10083 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10084 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10085 available_hot_pixel_modes,
10086 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10087
10088 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10089 ANDROID_SHADING_MODE_FAST,
10090 ANDROID_SHADING_MODE_HIGH_QUALITY};
10091 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10092 available_shading_modes,
10093 3);
10094
10095 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10096 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10097 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10098 available_lens_shading_map_modes,
10099 2);
10100
10101 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10102 ANDROID_EDGE_MODE_FAST,
10103 ANDROID_EDGE_MODE_HIGH_QUALITY,
10104 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10105 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10106 available_edge_modes,
10107 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10108
10109 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10110 ANDROID_NOISE_REDUCTION_MODE_FAST,
10111 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10112 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10113 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10114 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10115 available_noise_red_modes,
10116 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10117
10118 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10119 ANDROID_TONEMAP_MODE_FAST,
10120 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10121 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10122 available_tonemap_modes,
10123 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10124
10125 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10126 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10127 available_hot_pixel_map_modes,
10128 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10129
10130 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10131 gCamCapability[cameraId]->reference_illuminant1);
10132 if (NAME_NOT_FOUND != val) {
10133 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10134 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10135 }
10136
10137 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10138 gCamCapability[cameraId]->reference_illuminant2);
10139 if (NAME_NOT_FOUND != val) {
10140 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10141 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10142 }
10143
10144 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10145 (void *)gCamCapability[cameraId]->forward_matrix1,
10146 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10147
10148 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10149 (void *)gCamCapability[cameraId]->forward_matrix2,
10150 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10151
10152 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10153 (void *)gCamCapability[cameraId]->color_transform1,
10154 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10155
10156 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10157 (void *)gCamCapability[cameraId]->color_transform2,
10158 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10159
10160 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10161 (void *)gCamCapability[cameraId]->calibration_transform1,
10162 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10163
10164 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10165 (void *)gCamCapability[cameraId]->calibration_transform2,
10166 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10167
10168 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10169 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10170 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10171 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10172 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10173 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10174 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10175 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10176 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10177 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10178 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10179 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10180 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10181 ANDROID_JPEG_GPS_COORDINATES,
10182 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10183 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10184 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10185 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10186 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10187 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10188 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10189 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10190 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10191 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010192#ifndef USE_HAL_3_3
10193 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10194#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010195 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010196 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010197 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10198 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010199 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010200 /* DevCamDebug metadata request_keys_basic */
10201 DEVCAMDEBUG_META_ENABLE,
10202 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010203 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010204 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010205 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010206 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010207 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010208 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010209
10210 size_t request_keys_cnt =
10211 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10212 Vector<int32_t> available_request_keys;
10213 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10214 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10215 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10216 }
10217
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010218 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010219 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010220 }
10221
Thierry Strudel3d639192016-09-09 11:52:26 -070010222 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10223 available_request_keys.array(), available_request_keys.size());
10224
10225 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10226 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10227 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10228 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10229 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10230 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10231 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10232 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10233 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10234 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10235 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10236 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10237 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10238 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10239 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10240 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10241 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010242 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010243 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10244 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10245 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010246 ANDROID_STATISTICS_FACE_SCORES,
10247#ifndef USE_HAL_3_3
10248 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10249#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010250 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010251 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010252 // DevCamDebug metadata result_keys_basic
10253 DEVCAMDEBUG_META_ENABLE,
10254 // DevCamDebug metadata result_keys AF
10255 DEVCAMDEBUG_AF_LENS_POSITION,
10256 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10257 DEVCAMDEBUG_AF_TOF_DISTANCE,
10258 DEVCAMDEBUG_AF_LUMA,
10259 DEVCAMDEBUG_AF_HAF_STATE,
10260 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10261 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10262 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10263 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10264 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10265 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10266 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10267 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10268 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10269 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10270 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10271 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10272 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10273 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10274 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10275 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10276 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10277 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10278 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10279 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10280 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10281 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10282 // DevCamDebug metadata result_keys AEC
10283 DEVCAMDEBUG_AEC_TARGET_LUMA,
10284 DEVCAMDEBUG_AEC_COMP_LUMA,
10285 DEVCAMDEBUG_AEC_AVG_LUMA,
10286 DEVCAMDEBUG_AEC_CUR_LUMA,
10287 DEVCAMDEBUG_AEC_LINECOUNT,
10288 DEVCAMDEBUG_AEC_REAL_GAIN,
10289 DEVCAMDEBUG_AEC_EXP_INDEX,
10290 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010291 // DevCamDebug metadata result_keys zzHDR
10292 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10293 DEVCAMDEBUG_AEC_L_LINECOUNT,
10294 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10295 DEVCAMDEBUG_AEC_S_LINECOUNT,
10296 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10297 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10298 // DevCamDebug metadata result_keys ADRC
10299 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10300 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10301 DEVCAMDEBUG_AEC_GTM_RATIO,
10302 DEVCAMDEBUG_AEC_LTM_RATIO,
10303 DEVCAMDEBUG_AEC_LA_RATIO,
10304 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010305 // DevCamDebug metadata result_keys AEC MOTION
10306 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10307 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10308 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010309 // DevCamDebug metadata result_keys AWB
10310 DEVCAMDEBUG_AWB_R_GAIN,
10311 DEVCAMDEBUG_AWB_G_GAIN,
10312 DEVCAMDEBUG_AWB_B_GAIN,
10313 DEVCAMDEBUG_AWB_CCT,
10314 DEVCAMDEBUG_AWB_DECISION,
10315 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010316 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10317 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10318 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010319 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010320 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010321 };
10322
Thierry Strudel3d639192016-09-09 11:52:26 -070010323 size_t result_keys_cnt =
10324 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10325
10326 Vector<int32_t> available_result_keys;
10327 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10328 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10329 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10330 }
10331 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10332 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10333 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10334 }
10335 if (supportedFaceDetectMode == 1) {
10336 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10337 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10338 } else if ((supportedFaceDetectMode == 2) ||
10339 (supportedFaceDetectMode == 3)) {
10340 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10341 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10342 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010343#ifndef USE_HAL_3_3
10344 if (hasBlackRegions) {
10345 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10346 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10347 }
10348#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010349
10350 if (gExposeEnableZslKey) {
10351 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10352 }
10353
Thierry Strudel3d639192016-09-09 11:52:26 -070010354 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10355 available_result_keys.array(), available_result_keys.size());
10356
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010357 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010358 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10359 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10360 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10361 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10362 ANDROID_SCALER_CROPPING_TYPE,
10363 ANDROID_SYNC_MAX_LATENCY,
10364 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10365 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10366 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10367 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10368 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10369 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10370 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10371 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10372 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10373 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10374 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10375 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10376 ANDROID_LENS_FACING,
10377 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10378 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10379 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10380 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10381 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10382 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10383 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10384 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10385 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10386 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10387 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10388 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10389 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10390 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10391 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10392 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10393 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10394 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10395 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10396 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010397 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010398 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10399 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10400 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10401 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10402 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10403 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10404 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10405 ANDROID_CONTROL_AVAILABLE_MODES,
10406 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10407 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10408 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10409 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010410 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10411#ifndef USE_HAL_3_3
10412 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10413 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10414#endif
10415 };
10416
10417 Vector<int32_t> available_characteristics_keys;
10418 available_characteristics_keys.appendArray(characteristics_keys_basic,
10419 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10420#ifndef USE_HAL_3_3
10421 if (hasBlackRegions) {
10422 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10423 }
10424#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010425
10426 if (0 <= indexPD) {
10427 int32_t depthKeys[] = {
10428 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10429 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10430 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10431 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10432 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10433 };
10434 available_characteristics_keys.appendArray(depthKeys,
10435 sizeof(depthKeys) / sizeof(depthKeys[0]));
10436 }
10437
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010439 available_characteristics_keys.array(),
10440 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010441
10442 /*available stall durations depend on the hw + sw and will be different for different devices */
10443 /*have to add for raw after implementation*/
10444 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10445 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10446
10447 Vector<int64_t> available_stall_durations;
10448 for (uint32_t j = 0; j < stall_formats_count; j++) {
10449 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10450 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10451 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10452 available_stall_durations.add(stall_formats[j]);
10453 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10454 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10455 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10456 }
10457 } else {
10458 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10459 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10460 available_stall_durations.add(stall_formats[j]);
10461 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10462 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10463 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10464 }
10465 }
10466 }
10467 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10468 available_stall_durations.array(),
10469 available_stall_durations.size());
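/* Illustrative note (not part of the original HAL): each entry appended above
 * is a quadruple matching the layout of ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
 *
 *   { format, width, height, stall_duration_ns }
 *
 * e.g. { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 433333333 } for a hypothetical
 * ~12 MP JPEG with a ~433 ms stall.
 */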
10470
10471 //QCAMERA3_OPAQUE_RAW
10472 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10473 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10474 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10475 case LEGACY_RAW:
10476 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10477 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10478 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10479 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10480 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10481 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10482 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10483 break;
10484 case MIPI_RAW:
10485 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10486 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10487 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10488 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10489 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10490 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10491 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10492 break;
10493 default:
10494 LOGE("unknown opaque_raw_format %d",
10495 gCamCapability[cameraId]->opaque_raw_fmt);
10496 break;
10497 }
10498 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10499
10500 Vector<int32_t> strides;
10501 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10502 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10503 cam_stream_buf_plane_info_t buf_planes;
10504 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10505 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10506 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10507 &gCamCapability[cameraId]->padding_info, &buf_planes);
10508 strides.add(buf_planes.plane_info.mp[0].stride);
10509 }
10510 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10511 strides.size());
10512
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010513 //TBD: remove the following line once backend advertises zzHDR in feature mask
10514 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010515 //Video HDR default
10516 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10517 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010518 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010519 int32_t vhdr_mode[] = {
10520 QCAMERA3_VIDEO_HDR_MODE_OFF,
10521 QCAMERA3_VIDEO_HDR_MODE_ON};
10522
10523 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10524 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10525 vhdr_mode, vhdr_mode_count);
10526 }
10527
Thierry Strudel3d639192016-09-09 11:52:26 -070010528 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10529 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10530 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10531
10532 uint8_t isMonoOnly =
10533 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10534 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10535 &isMonoOnly, 1);
10536
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010537#ifndef USE_HAL_3_3
10538 Vector<int32_t> opaque_size;
10539 for (size_t j = 0; j < scalar_formats_count; j++) {
10540 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10541 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10542 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10543 cam_stream_buf_plane_info_t buf_planes;
10544
10545 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10546 &gCamCapability[cameraId]->padding_info, &buf_planes);
10547
10548 if (rc == 0) {
10549 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10550 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10551 opaque_size.add(buf_planes.plane_info.frame_len);
10552 } else {
10553 LOGE("raw frame calculation failed!");
10554 }
10555 }
10556 }
10557 }
10558
10559 if ((opaque_size.size() > 0) &&
10560 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10561 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10562 else
10563 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10564#endif
10565
Thierry Strudel04e026f2016-10-10 11:27:36 -070010566 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10567 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10568 size = 0;
10569 count = CAM_IR_MODE_MAX;
10570 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10571 for (size_t i = 0; i < count; i++) {
10572 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10573 gCamCapability[cameraId]->supported_ir_modes[i]);
10574 if (NAME_NOT_FOUND != val) {
10575 avail_ir_modes[size] = (int32_t)val;
10576 size++;
10577 }
10578 }
10579 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10580 avail_ir_modes, size);
10581 }
10582
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010583 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10584 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10585 size = 0;
10586 count = CAM_AEC_CONVERGENCE_MAX;
10587 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10588 for (size_t i = 0; i < count; i++) {
10589 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10590 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10591 if (NAME_NOT_FOUND != val) {
10592 available_instant_aec_modes[size] = (int32_t)val;
10593 size++;
10594 }
10595 }
10596 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10597 available_instant_aec_modes, size);
10598 }
10599
Thierry Strudel54dc9782017-02-15 12:12:10 -080010600 int32_t sharpness_range[] = {
10601 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10602 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10603 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10604
10605 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10606 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10607 size = 0;
10608 count = CAM_BINNING_CORRECTION_MODE_MAX;
10609 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10610 for (size_t i = 0; i < count; i++) {
10611 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10612 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10613 gCamCapability[cameraId]->supported_binning_modes[i]);
10614 if (NAME_NOT_FOUND != val) {
10615 avail_binning_modes[size] = (int32_t)val;
10616 size++;
10617 }
10618 }
10619 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10620 avail_binning_modes, size);
10621 }
10622
10623 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10624 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10625 size = 0;
10626 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10627 for (size_t i = 0; i < count; i++) {
10628 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10629 gCamCapability[cameraId]->supported_aec_modes[i]);
10630 if (NAME_NOT_FOUND != val)
10631 available_aec_modes[size++] = val;
10632 }
10633 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10634 available_aec_modes, size);
10635 }
10636
10637 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10638 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10639 size = 0;
10640 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10641 for (size_t i = 0; i < count; i++) {
10642 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10643 gCamCapability[cameraId]->supported_iso_modes[i]);
10644 if (NAME_NOT_FOUND != val)
10645 available_iso_modes[size++] = val;
10646 }
10647 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10648 available_iso_modes, size);
10649 }
10650
10651 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010652 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010653 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10654 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10655 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10656
10657 int32_t available_saturation_range[4];
10658 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10659 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10660 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10661 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10662 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10663 available_saturation_range, 4);
10664
10665 uint8_t is_hdr_values[2];
10666 is_hdr_values[0] = 0;
10667 is_hdr_values[1] = 1;
10668 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10669 is_hdr_values, 2);
10670
10671 float is_hdr_confidence_range[2];
10672 is_hdr_confidence_range[0] = 0.0;
10673 is_hdr_confidence_range[1] = 1.0;
10674 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10675 is_hdr_confidence_range, 2);
10676
Emilian Peev0a972ef2017-03-16 10:25:53 +000010677 size_t eepromLength = strnlen(
10678 reinterpret_cast<const char *>(
10679 gCamCapability[cameraId]->eeprom_version_info),
10680 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10681 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010682 char easelInfo[] = ",E:N";
10683 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10684 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10685 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010686 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10687 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010688 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010689 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010690 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10691 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10692 }
10693
Thierry Strudel3d639192016-09-09 11:52:26 -070010694 gStaticMetadata[cameraId] = staticInfo.release();
10695 return rc;
10696}
10697
10698/*===========================================================================
10699 * FUNCTION : makeTable
10700 *
10701 * DESCRIPTION: make a table of sizes
10702 *
10703 * PARAMETERS :
10704 *
10705 *
10706 *==========================================================================*/
10707void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10708 size_t max_size, int32_t *sizeTable)
10709{
10710 size_t j = 0;
10711 if (size > max_size) {
10712 size = max_size;
10713 }
10714 for (size_t i = 0; i < size; i++) {
10715 sizeTable[j] = dimTable[i].width;
10716 sizeTable[j+1] = dimTable[i].height;
10717 j+=2;
10718 }
10719}
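/* Illustrative note (not part of the original HAL): makeTable() flattens a
 * cam_dimension_t table into interleaved width/height pairs, the layout the
 * framework expects for size lists. A minimal sketch, assuming a two-entry
 * input table:
 *
 *   cam_dimension_t dims[] = {{1920, 1080}, {1280, 720}};
 *   int32_t sizes[4];
 *   makeTable(dims, 2, 2, sizes);   // sizes == {1920, 1080, 1280, 720}
 */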
10720
10721/*===========================================================================
10722 * FUNCTION : makeFPSTable
10723 *
10724 * DESCRIPTION: make a table of fps ranges
10725 *
10726 * PARAMETERS :
10727 *
10728 *==========================================================================*/
10729void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10730 size_t max_size, int32_t *fpsRangesTable)
10731{
10732 size_t j = 0;
10733 if (size > max_size) {
10734 size = max_size;
10735 }
10736 for (size_t i = 0; i < size; i++) {
10737 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10738 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10739 j+=2;
10740 }
10741}
10742
10743/*===========================================================================
10744 * FUNCTION : makeOverridesList
10745 *
10746 * DESCRIPTION: make a list of scene mode overrides
10747 *
10748 * PARAMETERS :
10749 *
10750 *
10751 *==========================================================================*/
10752void QCamera3HardwareInterface::makeOverridesList(
10753 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10754 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10755{
10756 /* The daemon will give a list of overrides for all scene modes.
10757 However, we should send the framework only the overrides for the
10758 scene modes that it supports. */
10759 size_t j = 0;
10760 if (size > max_size) {
10761 size = max_size;
10762 }
10763 size_t focus_count = CAM_FOCUS_MODE_MAX;
10764 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10765 focus_count);
10766 for (size_t i = 0; i < size; i++) {
10767 bool supt = false;
10768 size_t index = supported_indexes[i];
10769 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10770 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10771 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10772 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10773 overridesTable[index].awb_mode);
10774 if (NAME_NOT_FOUND != val) {
10775 overridesList[j+1] = (uint8_t)val;
10776 }
10777 uint8_t focus_override = overridesTable[index].af_mode;
10778 for (size_t k = 0; k < focus_count; k++) {
10779 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10780 supt = true;
10781 break;
10782 }
10783 }
10784 if (supt) {
10785 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10786 focus_override);
10787 if (NAME_NOT_FOUND != val) {
10788 overridesList[j+2] = (uint8_t)val;
10789 }
10790 } else {
10791 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10792 }
10793 j+=3;
10794 }
10795}
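/* Illustrative note (not part of the original HAL): each supported scene mode
 * consumes three consecutive bytes in overridesList, matching the layout of
 * ANDROID_CONTROL_SCENE_MODE_OVERRIDES:
 *
 *   overridesList[3*i + 0]  // AE override (AUTO_FLASH if a flash is available)
 *   overridesList[3*i + 1]  // AWB override, mapped via WHITE_BALANCE_MODES_MAP
 *   overridesList[3*i + 2]  // AF override (AF_MODE_OFF if the mode is unsupported)
 */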
10796
10797/*===========================================================================
10798 * FUNCTION : filterJpegSizes
10799 *
10800 * DESCRIPTION: Returns the supported JPEG sizes, keeping only processed sizes
10801 * that are at least the active array size divided by the maximum downscale factor
10802 *
10803 * PARAMETERS :
10804 *
10805 * RETURN : length of jpegSizes array
10806 *==========================================================================*/
10807
10808size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10809 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10810 uint8_t downscale_factor)
10811{
10812 if (0 == downscale_factor) {
10813 downscale_factor = 1;
10814 }
10815
10816 int32_t min_width = active_array_size.width / downscale_factor;
10817 int32_t min_height = active_array_size.height / downscale_factor;
10818 size_t jpegSizesCnt = 0;
10819 if (processedSizesCnt > maxCount) {
10820 processedSizesCnt = maxCount;
10821 }
10822 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10823 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10824 jpegSizes[jpegSizesCnt] = processedSizes[i];
10825 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10826 jpegSizesCnt += 2;
10827 }
10828 }
10829 return jpegSizesCnt;
10830}
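/* Illustrative example (hypothetical sizes, not from any real sensor): with a
 * 4000x3000 active array and downscale_factor == 2, only processed sizes of at
 * least 2000x1500 survive the filter above:
 *
 *   processedSizes = {4000, 3000, 2592, 1944, 1920, 1080}
 *   jpegSizes      = {4000, 3000, 2592, 1944}   // returns 4; 1920x1080 dropped
 */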
10831
10832/*===========================================================================
10833 * FUNCTION : computeNoiseModelEntryS
10834 *
10835 * DESCRIPTION: function to map a given sensitivity to the S noise
10836 * model parameters in the DNG noise model.
10837 *
10838 * PARAMETERS : sens : the sensor sensitivity
10839 *
10840 * RETURN : S (sensor amplification) noise
10841 *
10842 *==========================================================================*/
10843double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10844 double s = gCamCapability[mCameraId]->gradient_S * sens +
10845 gCamCapability[mCameraId]->offset_S;
10846 return ((s < 0.0) ? 0.0 : s);
10847}
10848
10849/*===========================================================================
10850 * FUNCTION : computeNoiseModelEntryO
10851 *
10852 * DESCRIPTION: function to map a given sensitivity to the O noise
10853 * model parameters in the DNG noise model.
10854 *
10855 * PARAMETERS : sens : the sensor sensitivity
10856 *
10857 * RETURN : O (sensor readout) noise
10858 *
10859 *==========================================================================*/
10860double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10861 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10862 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10863 1.0 : (1.0 * sens / max_analog_sens);
10864 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10865 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10866 return ((o < 0.0) ? 0.0 : o);
10867}
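/* Illustrative note (not part of the original HAL): S and O above are the
 * per-sensitivity coefficients of the DNG noise model used by
 * ANDROID_SENSOR_NOISE_PROFILE, where the variance of a normalized pixel value
 * x is modeled as variance(x) = S * x + O. A sketch of filling one (S, O) pair
 * per color channel for a capture at a given sensitivity (the channel count of
 * 4 is an assumption here):
 *
 *   int32_t sens = 400;
 *   double noise_profile[2 * 4];   // (S, O) per CFA channel, 4 channels assumed
 *   for (int ch = 0; ch < 4; ch++) {
 *       noise_profile[2 * ch]     = computeNoiseModelEntryS(sens);
 *       noise_profile[2 * ch + 1] = computeNoiseModelEntryO(sens);
 *   }
 */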
10868
10869/*===========================================================================
10870 * FUNCTION : getSensorSensitivity
10871 *
10872 * DESCRIPTION: convert iso_mode to an integer value
10873 *
10874 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10875 *
10876 * RETURN : sensitivity supported by sensor
10877 *
10878 *==========================================================================*/
10879int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10880{
10881 int32_t sensitivity;
10882
10883 switch (iso_mode) {
10884 case CAM_ISO_MODE_100:
10885 sensitivity = 100;
10886 break;
10887 case CAM_ISO_MODE_200:
10888 sensitivity = 200;
10889 break;
10890 case CAM_ISO_MODE_400:
10891 sensitivity = 400;
10892 break;
10893 case CAM_ISO_MODE_800:
10894 sensitivity = 800;
10895 break;
10896 case CAM_ISO_MODE_1600:
10897 sensitivity = 1600;
10898 break;
10899 default:
10900 sensitivity = -1;
10901 break;
10902 }
10903 return sensitivity;
10904}
10905
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010906int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010907 if (gEaselManagerClient == nullptr) {
10908 gEaselManagerClient = EaselManagerClient::create();
10909 if (gEaselManagerClient == nullptr) {
10910 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10911 return -ENODEV;
10912 }
10913 }
10914
10915 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010916 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10917 // to connect to Easel.
10918 bool doNotpowerOnEasel =
10919 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10920
10921 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010922 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10923 return OK;
10924 }
10925
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010926 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010927 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010928 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010929 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010930 return res;
10931 }
10932
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010933 EaselManagerClientOpened = true;
10934
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010935 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010936 if (res != OK) {
10937 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10938 }
10939
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010940 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010941 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010942
10943 // Expose enableZsl key only when HDR+ mode is enabled.
10944 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010945 }
10946
10947 return OK;
10948}
10949
Thierry Strudel3d639192016-09-09 11:52:26 -070010950/*===========================================================================
10951 * FUNCTION : getCamInfo
10952 *
10953 * DESCRIPTION: query camera capabilities
10954 *
10955 * PARAMETERS :
10956 * @cameraId : camera Id
10957 * @info : camera info struct to be filled in with camera capabilities
10958 *
10959 * RETURN : int type of status
10960 * NO_ERROR -- success
10961 * non-zero failure code
10962 *==========================================================================*/
10963int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10964 struct camera_info *info)
10965{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010966 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010967 int rc = 0;
10968
10969 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010970
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010971 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010972 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010973 rc = initHdrPlusClientLocked();
10974 if (rc != OK) {
10975 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10976 pthread_mutex_unlock(&gCamLock);
10977 return rc;
10978 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010979 }
10980
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 if (NULL == gCamCapability[cameraId]) {
10982 rc = initCapabilities(cameraId);
10983 if (rc < 0) {
10984 pthread_mutex_unlock(&gCamLock);
10985 return rc;
10986 }
10987 }
10988
10989 if (NULL == gStaticMetadata[cameraId]) {
10990 rc = initStaticMetadata(cameraId);
10991 if (rc < 0) {
10992 pthread_mutex_unlock(&gCamLock);
10993 return rc;
10994 }
10995 }
10996
10997 switch(gCamCapability[cameraId]->position) {
10998 case CAM_POSITION_BACK:
10999 case CAM_POSITION_BACK_AUX:
11000 info->facing = CAMERA_FACING_BACK;
11001 break;
11002
11003 case CAM_POSITION_FRONT:
11004 case CAM_POSITION_FRONT_AUX:
11005 info->facing = CAMERA_FACING_FRONT;
11006 break;
11007
11008 default:
11009 LOGE("Unknown position type %d for camera id:%d",
11010 gCamCapability[cameraId]->position, cameraId);
11011 rc = -1;
11012 break;
11013 }
11014
11015
11016 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011017#ifndef USE_HAL_3_3
11018 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11019#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011020 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011021#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011022 info->static_camera_characteristics = gStaticMetadata[cameraId];
11023
11024 //For now assume both cameras can operate independently.
11025 info->conflicting_devices = NULL;
11026 info->conflicting_devices_length = 0;
11027
11028 //resource cost is 100 * MIN(1.0, m/M),
11029 //where m is throughput requirement with maximum stream configuration
11030 //and M is CPP maximum throughput.
11031 float max_fps = 0.0;
11032 for (uint32_t i = 0;
11033 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11034 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11035 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11036 }
11037 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11038 gCamCapability[cameraId]->active_array_size.width *
11039 gCamCapability[cameraId]->active_array_size.height * max_fps /
11040 gCamCapability[cameraId]->max_pixel_bandwidth;
11041 info->resource_cost = 100 * MIN(1.0, ratio);
11042 LOGI("camera %d resource cost is %d", cameraId,
11043 info->resource_cost);
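/* Illustrative example (hypothetical numbers): with a 4000x3000 active array,
 * a 30 fps maximum, 3 processed streams and a CPP bandwidth M of 2.16 GP/s,
 * m = 3 * 4000 * 3000 * 30 = 1.08 GP/s, so ratio = m / M = 0.5 and
 * info->resource_cost = 100 * MIN(1.0, 0.5) = 50.
 */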
11044
11045 pthread_mutex_unlock(&gCamLock);
11046 return rc;
11047}
11048
11049/*===========================================================================
11050 * FUNCTION : translateCapabilityToMetadata
11051 *
11052 * DESCRIPTION: translate the capability into camera_metadata_t
11053 *
11054 * PARAMETERS : type of the request
11055 *
11056 *
11057 * RETURN : success: camera_metadata_t*
11058 * failure: NULL
11059 *
11060 *==========================================================================*/
11061camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11062{
11063 if (mDefaultMetadata[type] != NULL) {
11064 return mDefaultMetadata[type];
11065 }
11066 //first time we are handling this request
11067 //fill up the metadata structure using the wrapper class
11068 CameraMetadata settings;
11069 //translate from cam_capability_t to camera_metadata_tag_t
11070 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11071 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11072 int32_t defaultRequestID = 0;
11073 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11074
11075 /* OIS disable */
11076 char ois_prop[PROPERTY_VALUE_MAX];
11077 memset(ois_prop, 0, sizeof(ois_prop));
11078 property_get("persist.camera.ois.disable", ois_prop, "0");
11079 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11080
11081 /* Force video to use OIS */
11082 char videoOisProp[PROPERTY_VALUE_MAX];
11083 memset(videoOisProp, 0, sizeof(videoOisProp));
11084 property_get("persist.camera.ois.video", videoOisProp, "1");
11085 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011086
11087 // Hybrid AE enable/disable
11088 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11089 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11090 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11091 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11092
Thierry Strudel3d639192016-09-09 11:52:26 -070011093 uint8_t controlIntent = 0;
11094 uint8_t focusMode;
11095 uint8_t vsMode;
11096 uint8_t optStabMode;
11097 uint8_t cacMode;
11098 uint8_t edge_mode;
11099 uint8_t noise_red_mode;
11100 uint8_t tonemap_mode;
11101 bool highQualityModeEntryAvailable = FALSE;
11102 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011103 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011104 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11105 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011106 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011107 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011108 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011109
Thierry Strudel3d639192016-09-09 11:52:26 -070011110 switch (type) {
11111 case CAMERA3_TEMPLATE_PREVIEW:
11112 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11113 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11114 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11115 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11116 edge_mode = ANDROID_EDGE_MODE_FAST;
11117 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11118 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11119 break;
11120 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11121 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11122 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11123 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11124 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11125 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11126 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11127 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11128 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11129 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11130 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11131 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11132 highQualityModeEntryAvailable = TRUE;
11133 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11134 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11135 fastModeEntryAvailable = TRUE;
11136 }
11137 }
11138 if (highQualityModeEntryAvailable) {
11139 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11140 } else if (fastModeEntryAvailable) {
11141 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11142 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011143 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11144 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11145 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011146 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011147 break;
11148 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11149 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11150 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11151 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011152 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11153 edge_mode = ANDROID_EDGE_MODE_FAST;
11154 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11155 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11156 if (forceVideoOis)
11157 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11158 break;
11159 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11160 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11161 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11162 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011163 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11164 edge_mode = ANDROID_EDGE_MODE_FAST;
11165 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11166 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11167 if (forceVideoOis)
11168 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11169 break;
11170 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11171 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11172 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11173 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11174 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11175 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11176 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11177 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11178 break;
11179 case CAMERA3_TEMPLATE_MANUAL:
11180 edge_mode = ANDROID_EDGE_MODE_FAST;
11181 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11182 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11183 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11184 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11185 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11186 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11187 break;
11188 default:
11189 edge_mode = ANDROID_EDGE_MODE_FAST;
11190 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11191 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11192 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11193 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11194 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11195 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11196 break;
11197 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011198 // Set CAC to OFF if underlying device doesn't support it
11199 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11200 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11201 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011202 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11203 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11204 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11205 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11206 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11207 }
11208 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011209 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011210 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011211
11212 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11213 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11214 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11215 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11216 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11217 || ois_disable)
11218 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11219 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011220 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011221
11222 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11223 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11224
11225 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11226 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11227
11228 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11229 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11230
11231 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11232 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11233
11234 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11235 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11236
11237 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11238 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11239
11240 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11241 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11242
11243 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11244 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11245
11246 /*flash*/
11247 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11248 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11249
11250 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11251 settings.update(ANDROID_FLASH_FIRING_POWER,
11252 &flashFiringLevel, 1);
11253
11254 /* lens */
11255 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11256 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11257
11258 if (gCamCapability[mCameraId]->filter_densities_count) {
11259 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11260 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11261 gCamCapability[mCameraId]->filter_densities_count);
11262 }
11263
11264 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11265 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11266
Thierry Strudel3d639192016-09-09 11:52:26 -070011267 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11268 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11269
11270 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11271 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11272
11273 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11274 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11275
11276 /* face detection (default to OFF) */
11277 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11278 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11279
Thierry Strudel54dc9782017-02-15 12:12:10 -080011280 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11281 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011282
11283 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11284 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11285
11286 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11287 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11288
Thierry Strudel3d639192016-09-09 11:52:26 -070011289
11290 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11291 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11292
11293 /* Exposure time(Update the Min Exposure Time)*/
11294 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11295 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11296
11297 /* frame duration */
11298 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11299 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11300
11301 /* sensitivity */
11302 static const int32_t default_sensitivity = 100;
11303 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011304#ifndef USE_HAL_3_3
11305 static const int32_t default_isp_sensitivity =
11306 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11307 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11308#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011309
11310 /*edge mode*/
11311 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11312
11313 /*noise reduction mode*/
11314 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11315
11316 /*color correction mode*/
11317 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11318 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11319
11320 /*transform matrix mode*/
11321 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11322
11323 int32_t scaler_crop_region[4];
11324 scaler_crop_region[0] = 0;
11325 scaler_crop_region[1] = 0;
11326 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11327 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11328 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11329
11330 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11331 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11332
11333 /*focus distance*/
11334 float focus_distance = 0.0;
11335 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11336
11337 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011338 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011339 float max_range = 0.0;
11340 float max_fixed_fps = 0.0;
11341 int32_t fps_range[2] = {0, 0};
11342 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11343 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011344 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11345 TEMPLATE_MAX_PREVIEW_FPS) {
11346 continue;
11347 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011348 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11349 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11350 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11351 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11352 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11353 if (range > max_range) {
11354 fps_range[0] =
11355 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11356 fps_range[1] =
11357 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11358 max_range = range;
11359 }
11360 } else {
11361 if (range < 0.01 && max_fixed_fps <
11362 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11363 fps_range[0] =
11364 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11365 fps_range[1] =
11366 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11367 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11368 }
11369 }
11370 }
11371 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
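/* Illustrative example (hypothetical FPS table): given ranges {15, 30},
 * {30, 30} and {7.5, 60}, the {7.5, 60} entry is skipped because its max
 * exceeds TEMPLATE_MAX_PREVIEW_FPS; preview/still/ZSL templates then pick the
 * widest remaining range {15, 30}, while the other templates pick the highest
 * fixed range {30, 30}.
 */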
11372
11373 /*precapture trigger*/
11374 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11375 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11376
11377 /*af trigger*/
11378 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11379 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11380
11381 /* ae & af regions */
11382 int32_t active_region[] = {
11383 gCamCapability[mCameraId]->active_array_size.left,
11384 gCamCapability[mCameraId]->active_array_size.top,
11385 gCamCapability[mCameraId]->active_array_size.left +
11386 gCamCapability[mCameraId]->active_array_size.width,
11387 gCamCapability[mCameraId]->active_array_size.top +
11388 gCamCapability[mCameraId]->active_array_size.height,
11389 0};
11390 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11391 sizeof(active_region) / sizeof(active_region[0]));
11392 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11393 sizeof(active_region) / sizeof(active_region[0]));
11394
11395 /* black level lock */
11396 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11397 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11398
Thierry Strudel3d639192016-09-09 11:52:26 -070011399 //special defaults for manual template
11400 if (type == CAMERA3_TEMPLATE_MANUAL) {
11401 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11402 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11403
11404 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11405 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11406
11407 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11408 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11409
11410 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11411 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11412
11413 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11414 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11415
11416 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11417 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11418 }
11419
11420
11421 /* TNR
11422 * We'll use this location to determine for which templates TNR will be set.
11423 * TNR is enabled if either the preview or the video stream requires it.
11424 * This is not to be confused with linking on a per-stream basis; that decision
11425 * is still made per session and is handled as part of stream configuration.
11426 */
11427 uint8_t tnr_enable = 0;
11428
11429 if (m_bTnrPreview || m_bTnrVideo) {
11430
11431 switch (type) {
11432 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11433 tnr_enable = 1;
11434 break;
11435
11436 default:
11437 tnr_enable = 0;
11438 break;
11439 }
11440
11441 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11442 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11443 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11444
11445 LOGD("TNR:%d with process plate %d for template:%d",
11446 tnr_enable, tnr_process_type, type);
11447 }
11448
11449 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011450 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011451 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11452
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011453 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011454 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11455
Shuzhen Wang920ea402017-05-03 08:49:39 -070011456 uint8_t related_camera_id = mCameraId;
11457 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011458
11459 /* CDS default */
11460 char prop[PROPERTY_VALUE_MAX];
11461 memset(prop, 0, sizeof(prop));
11462 property_get("persist.camera.CDS", prop, "Auto");
11463 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11464 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11465 if (CAM_CDS_MODE_MAX == cds_mode) {
11466 cds_mode = CAM_CDS_MODE_AUTO;
11467 }
11468
11469 /* Disabling CDS in templates which have TNR enabled*/
11470 if (tnr_enable)
11471 cds_mode = CAM_CDS_MODE_OFF;
11472
11473 int32_t mode = cds_mode;
11474 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011475
Thierry Strudel269c81a2016-10-12 12:13:59 -070011476 /* Manual Convergence AEC Speed is disabled by default*/
11477 float default_aec_speed = 0;
11478 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11479
11480 /* Manual Convergence AWB Speed is disabled by default*/
11481 float default_awb_speed = 0;
11482 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11483
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011484 // Set instant AEC to normal convergence by default
11485 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11486 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11487
Shuzhen Wang19463d72016-03-08 11:09:52 -080011488 /* hybrid ae */
11489 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11490
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011491 if (gExposeEnableZslKey) {
11492 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11493 }
11494
Thierry Strudel3d639192016-09-09 11:52:26 -070011495 mDefaultMetadata[type] = settings.release();
11496
11497 return mDefaultMetadata[type];
11498}
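/*
 * Illustrative summary of the defaults built above (values follow the code;
 * template names are the standard camera3 templates):
 *   CAMERA3_TEMPLATE_MANUAL       -> CONTROL/AF/AE/AWB modes OFF, tonemap FAST,
 *                                    color correction TRANSFORM_MATRIX
 *   CAMERA3_TEMPLATE_VIDEO_RECORD -> QCAMERA3_TEMPORAL_DENOISE_ENABLE = 1 when
 *                                    m_bTnrPreview or m_bTnrVideo is set, and
 *                                    CDS forced OFF because TNR is on
 *   other templates               -> TNR off, CDS taken from persist.camera.CDS
 *                                    (defaulting to Auto)
 * The released metadata is cached in mDefaultMetadata[type].
 */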
11499
11500/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011501 * FUNCTION : getExpectedFrameDuration
11502 *
11503 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11504 * duration
11505 *
11506 * PARAMETERS :
11507 * @request : request settings
11508 * @frameDuration : The maximum frame duration in nanoseconds
11509 *
11510 * RETURN : None
11511 *==========================================================================*/
11512void QCamera3HardwareInterface::getExpectedFrameDuration(
11513 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11514 if (nullptr == frameDuration) {
11515 return;
11516 }
11517
11518 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11519 find_camera_metadata_ro_entry(request,
11520 ANDROID_SENSOR_EXPOSURE_TIME,
11521 &e);
11522 if (e.count > 0) {
11523 *frameDuration = e.data.i64[0];
11524 }
11525 find_camera_metadata_ro_entry(request,
11526 ANDROID_SENSOR_FRAME_DURATION,
11527 &e);
11528 if (e.count > 0) {
11529 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11530 }
11531}
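/*
 * Example (hypothetical request values): with ANDROID_SENSOR_EXPOSURE_TIME set
 * to 33000000 ns and ANDROID_SENSOR_FRAME_DURATION set to 50000000 ns,
 * getExpectedFrameDuration() reports the larger of the two:
 *
 *   nsecs_t duration = 0;
 *   getExpectedFrameDuration(request->settings, &duration);
 *   // duration == 50000000 (50 ms)
 */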
11532
11533/*===========================================================================
11534 * FUNCTION : calculateMaxExpectedDuration
11535 *
11536 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11537 * current camera settings.
11538 *
11539 * PARAMETERS :
11540 * @request : request settings
11541 *
11542 * RETURN : Expected frame duration in nanoseconds.
11543 *==========================================================================*/
11544nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11545 const camera_metadata_t *request) {
11546 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11547 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11548 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11549 if (e.count == 0) {
11550 return maxExpectedDuration;
11551 }
11552
11553 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11554 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11555 }
11556
11557 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11558 return maxExpectedDuration;
11559 }
11560
11561 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11562 if (e.count == 0) {
11563 return maxExpectedDuration;
11564 }
11565
11566 switch (e.data.u8[0]) {
11567 case ANDROID_CONTROL_AE_MODE_OFF:
11568 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11569 break;
11570 default:
11571 find_camera_metadata_ro_entry(request,
11572 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11573 &e);
11574 if (e.count > 1) {
12575 maxExpectedDuration = 1e9 / e.data.i32[0];
11576 }
11577 break;
11578 }
11579
11580 return maxExpectedDuration;
11581}
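/*
 * Illustrative behaviour (example values only):
 *   - ANDROID_CONTROL_MODE absent                    -> kDefaultExpectedDuration
 *   - CONTROL_MODE OFF, or AUTO with AE_MODE OFF     -> max(exposure time,
 *     frame duration) taken from the request via getExpectedFrameDuration()
 *   - CONTROL_MODE AUTO with AE enabled and
 *     ANDROID_CONTROL_AE_TARGET_FPS_RANGE = [15, 30] -> 1e9 / 15, roughly 66.7 ms
 * setFrameParameters() below stores the result in mExpectedFrameDuration.
 */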
11582
11583/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011584 * FUNCTION : setFrameParameters
11585 *
11586 * DESCRIPTION: set parameters per frame as requested in the metadata from
11587 * framework
11588 *
11589 * PARAMETERS :
11590 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011591 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011592 * @blob_request: Whether this request is a blob request or not
11593 *
11594 * RETURN : success: NO_ERROR
11595 * failure:
11596 *==========================================================================*/
11597int QCamera3HardwareInterface::setFrameParameters(
11598 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011599 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011600 int blob_request,
11601 uint32_t snapshotStreamId)
11602{
11603 /*translate from camera_metadata_t type to parm_type_t*/
11604 int rc = 0;
11605 int32_t hal_version = CAM_HAL_V3;
11606
11607 clear_metadata_buffer(mParameters);
11608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11609 LOGE("Failed to set hal version in the parameters");
11610 return BAD_VALUE;
11611 }
11612
11613 /*we need to update the frame number in the parameters*/
11614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11615 request->frame_number)) {
11616 LOGE("Failed to set the frame number in the parameters");
11617 return BAD_VALUE;
11618 }
11619
11620 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011622 LOGE("Failed to set stream type mask in the parameters");
11623 return BAD_VALUE;
11624 }
11625
11626 if (mUpdateDebugLevel) {
11627 uint32_t dummyDebugLevel = 0;
11628 /* The value of dummyDebugLevel is irrelevant. On
11629 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
11630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11631 dummyDebugLevel)) {
11632 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11633 return BAD_VALUE;
11634 }
11635 mUpdateDebugLevel = false;
11636 }
11637
11638 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011639 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011640 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11641 if (blob_request)
11642 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11643 }
11644
11645 return rc;
11646}
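/*
 * Illustrative usage (sketch; setFrameParameters() is typically invoked from
 * processCaptureRequest() with a streamsArray that already lists the stream IDs
 * of the buffers in this request):
 *
 *   rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
 *   if (rc == NO_ERROR) {
 *       // mParameters now carries CAM_HAL_V3, the frame number, the stream ID
 *       // list and the translated per-frame settings for this capture request.
 *   }
 */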
11647
11648/*===========================================================================
11649 * FUNCTION : setReprocParameters
11650 *
11651 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11652 * return it.
11653 *
11654 * PARAMETERS :
11655 * @request : request that needs to be serviced
11656 *
11657 * RETURN : success: NO_ERROR
11658 * failure:
11659 *==========================================================================*/
11660int32_t QCamera3HardwareInterface::setReprocParameters(
11661 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11662 uint32_t snapshotStreamId)
11663{
11664 /*translate from camera_metadata_t type to parm_type_t*/
11665 int rc = 0;
11666
11667 if (NULL == request->settings){
11668 LOGE("Reprocess settings cannot be NULL");
11669 return BAD_VALUE;
11670 }
11671
11672 if (NULL == reprocParam) {
11673 LOGE("Invalid reprocessing metadata buffer");
11674 return BAD_VALUE;
11675 }
11676 clear_metadata_buffer(reprocParam);
11677
11678 /*we need to update the frame number in the parameters*/
11679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11680 request->frame_number)) {
11681 LOGE("Failed to set the frame number in the parameters");
11682 return BAD_VALUE;
11683 }
11684
11685 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11686 if (rc < 0) {
11687 LOGE("Failed to translate reproc request");
11688 return rc;
11689 }
11690
11691 CameraMetadata frame_settings;
11692 frame_settings = request->settings;
11693 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11694 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11695 int32_t *crop_count =
11696 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11697 int32_t *crop_data =
11698 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11699 int32_t *roi_map =
11700 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11701 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11702 cam_crop_data_t crop_meta;
11703 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11704 crop_meta.num_of_streams = 1;
11705 crop_meta.crop_info[0].crop.left = crop_data[0];
11706 crop_meta.crop_info[0].crop.top = crop_data[1];
11707 crop_meta.crop_info[0].crop.width = crop_data[2];
11708 crop_meta.crop_info[0].crop.height = crop_data[3];
11709
11710 crop_meta.crop_info[0].roi_map.left =
11711 roi_map[0];
11712 crop_meta.crop_info[0].roi_map.top =
11713 roi_map[1];
11714 crop_meta.crop_info[0].roi_map.width =
11715 roi_map[2];
11716 crop_meta.crop_info[0].roi_map.height =
11717 roi_map[3];
11718
11719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11720 rc = BAD_VALUE;
11721 }
11722 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11723 request->input_buffer->stream,
11724 crop_meta.crop_info[0].crop.left,
11725 crop_meta.crop_info[0].crop.top,
11726 crop_meta.crop_info[0].crop.width,
11727 crop_meta.crop_info[0].crop.height);
11728 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11729 request->input_buffer->stream,
11730 crop_meta.crop_info[0].roi_map.left,
11731 crop_meta.crop_info[0].roi_map.top,
11732 crop_meta.crop_info[0].roi_map.width,
11733 crop_meta.crop_info[0].roi_map.height);
11734 } else {
11735 LOGE("Invalid reprocess crop count %d!", *crop_count);
11736 }
11737 } else {
11738 LOGE("No crop data from matching output stream");
11739 }
11740
11741 /* These settings are not needed for regular requests so handle them specially for
11742 reprocess requests; information needed for EXIF tags */
11743 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11744 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11745 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11746 if (NAME_NOT_FOUND != val) {
11747 uint32_t flashMode = (uint32_t)val;
11748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11749 rc = BAD_VALUE;
11750 }
11751 } else {
11752 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11753 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11754 }
11755 } else {
11756 LOGH("No flash mode in reprocess settings");
11757 }
11758
11759 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11760 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11762 rc = BAD_VALUE;
11763 }
11764 } else {
11765 LOGH("No flash state in reprocess settings");
11766 }
11767
11768 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11769 uint8_t *reprocessFlags =
11770 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11772 *reprocessFlags)) {
11773 rc = BAD_VALUE;
11774 }
11775 }
11776
Thierry Strudel54dc9782017-02-15 12:12:10 -080011777 // Add exif debug data to internal metadata
11778 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11779 mm_jpeg_debug_exif_params_t *debug_params =
11780 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11781 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11782 // AE
11783 if (debug_params->ae_debug_params_valid == TRUE) {
11784 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11785 debug_params->ae_debug_params);
11786 }
11787 // AWB
11788 if (debug_params->awb_debug_params_valid == TRUE) {
11789 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11790 debug_params->awb_debug_params);
11791 }
11792 // AF
11793 if (debug_params->af_debug_params_valid == TRUE) {
11794 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11795 debug_params->af_debug_params);
11796 }
11797 // ASD
11798 if (debug_params->asd_debug_params_valid == TRUE) {
11799 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11800 debug_params->asd_debug_params);
11801 }
11802 // Stats
11803 if (debug_params->stats_debug_params_valid == TRUE) {
11804 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11805 debug_params->stats_debug_params);
11806 }
11807 // BE Stats
11808 if (debug_params->bestats_debug_params_valid == TRUE) {
11809 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11810 debug_params->bestats_debug_params);
11811 }
11812 // BHIST
11813 if (debug_params->bhist_debug_params_valid == TRUE) {
11814 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11815 debug_params->bhist_debug_params);
11816 }
11817 // 3A Tuning
11818 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11819 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11820 debug_params->q3a_tuning_debug_params);
11821 }
11822 }
11823
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011824 // Add metadata which reprocess needs
11825 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11826 cam_reprocess_info_t *repro_info =
11827 (cam_reprocess_info_t *)frame_settings.find
11828 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011829 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011830 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011831 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011832 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011833 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011834 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011835 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011836 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011837 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011838 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011839 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011840 repro_info->pipeline_flip);
11841 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11842 repro_info->af_roi);
11843 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11844 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011845 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11846 CAM_INTF_PARM_ROTATION metadata has already been added in
11847 translateToHalMetadata, and the HAL needs to keep this new rotation
11848 metadata. Otherwise, the old rotation info saved in the vendor tag
11849 would be used */
11850 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11851 CAM_INTF_PARM_ROTATION, reprocParam) {
11852 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11853 } else {
11854 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011855 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011856 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011857 }
11858
11859 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11860 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11861 roi.width and roi.height will be the final JPEG size.
11862 For now, the HAL only checks this for reprocess requests */
11863 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11864 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11865 uint8_t *enable =
11866 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11867 if (*enable == TRUE) {
11868 int32_t *crop_data =
11869 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11870 cam_stream_crop_info_t crop_meta;
11871 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11872 crop_meta.stream_id = 0;
11873 crop_meta.crop.left = crop_data[0];
11874 crop_meta.crop.top = crop_data[1];
11875 crop_meta.crop.width = crop_data[2];
11876 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011877 // The JPEG crop roi should match cpp output size
11878 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11879 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11880 crop_meta.roi_map.left = 0;
11881 crop_meta.roi_map.top = 0;
11882 crop_meta.roi_map.width = cpp_crop->crop.width;
11883 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011884 }
11885 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11886 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011887 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011888 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011889 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11890 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011891 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011892 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11893
11894 // Add JPEG scale information
11895 cam_dimension_t scale_dim;
11896 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11897 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11898 int32_t *roi =
11899 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11900 scale_dim.width = roi[2];
11901 scale_dim.height = roi[3];
11902 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11903 scale_dim);
11904 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11905 scale_dim.width, scale_dim.height, mCameraId);
11906 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011907 }
11908 }
11909
11910 return rc;
11911}
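/*
 * Illustrative reprocess settings consumed above (tag names as used in the
 * code; example values are hypothetical):
 *   QCAMERA3_CROP_COUNT_REPROCESS    = 1
 *   QCAMERA3_CROP_REPROCESS          = {100, 50, 3840, 2160}  (left, top, w, h)
 *   QCAMERA3_CROP_ROI_MAP_REPROCESS  = {0, 0, 3840, 2160}
 *   QCAMERA3_JPEG_ENCODE_CROP_ENABLE = TRUE
 *   QCAMERA3_JPEG_ENCODE_CROP_RECT   = {0, 0, 4000, 3000}
 *   QCAMERA3_JPEG_ENCODE_CROP_ROI    = {0, 0, 1920, 1080}  (final JPEG size)
 * Flash mode/state and the EXIF debug blobs are carried only so the EXIF
 * information of the reprocessed JPEG can be filled in.
 */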
11912
11913/*===========================================================================
11914 * FUNCTION : saveRequestSettings
11915 *
11916 * DESCRIPTION: Add any settings that might have changed to the request settings
11917 * and save the settings to be applied on the frame
11918 *
11919 * PARAMETERS :
11920 * @jpegMetadata : the extracted and/or modified jpeg metadata
11921 * @request : request with initial settings
11922 *
11923 * RETURN :
11924 * camera_metadata_t* : pointer to the saved request settings
11925 *==========================================================================*/
11926camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11927 const CameraMetadata &jpegMetadata,
11928 camera3_capture_request_t *request)
11929{
11930 camera_metadata_t *resultMetadata;
11931 CameraMetadata camMetadata;
11932 camMetadata = request->settings;
11933
11934 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11935 int32_t thumbnail_size[2];
11936 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11937 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11938 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11939 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11940 }
11941
11942 if (request->input_buffer != NULL) {
11943 uint8_t reprocessFlags = 1;
11944 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11945 (uint8_t*)&reprocessFlags,
11946 sizeof(reprocessFlags));
11947 }
11948
11949 resultMetadata = camMetadata.release();
11950 return resultMetadata;
11951}
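/*
 * Example (hypothetical values): if the extracted jpegMetadata carries
 * ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240}, the settings saved here contain
 * that size instead of the one originally sent by the framework, and
 * QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS = 1 is added when the request has
 * an input buffer (reprocess).
 */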
11952
11953/*===========================================================================
11954 * FUNCTION : setHalFpsRange
11955 *
11956 * DESCRIPTION: set FPS range parameter
11957 *
11958 *
11959 * PARAMETERS :
11960 * @settings : Metadata from framework
11961 * @hal_metadata: Metadata buffer
11962 *
11963 *
11964 * RETURN : success: NO_ERROR
11965 * failure:
11966 *==========================================================================*/
11967int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11968 metadata_buffer_t *hal_metadata)
11969{
11970 int32_t rc = NO_ERROR;
11971 cam_fps_range_t fps_range;
11972 fps_range.min_fps = (float)
11973 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11974 fps_range.max_fps = (float)
11975 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11976 fps_range.video_min_fps = fps_range.min_fps;
11977 fps_range.video_max_fps = fps_range.max_fps;
11978
11979 LOGD("aeTargetFpsRange fps: [%f %f]",
11980 fps_range.min_fps, fps_range.max_fps);
11981 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11982 * follows:
11983 * ---------------------------------------------------------------|
11984 * Video stream is absent in configure_streams |
11985 * (Camcorder preview before the first video record |
11986 * ---------------------------------------------------------------|
11987 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11988 * | | | vid_min/max_fps|
11989 * ---------------------------------------------------------------|
11990 * NO | [ 30, 240] | 240 | [240, 240] |
11991 * |-------------|-------------|----------------|
11992 * | [240, 240] | 240 | [240, 240] |
11993 * ---------------------------------------------------------------|
11994 * Video stream is present in configure_streams |
11995 * ---------------------------------------------------------------|
11996 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11997 * | | | vid_min/max_fps|
11998 * ---------------------------------------------------------------|
11999 * NO | [ 30, 240] | 240 | [240, 240] |
12000 * (camcorder prev |-------------|-------------|----------------|
12001 * after video rec | [240, 240] | 240 | [240, 240] |
12002 * is stopped) | | | |
12003 * ---------------------------------------------------------------|
12004 * YES | [ 30, 240] | 240 | [240, 240] |
12005 * |-------------|-------------|----------------|
12006 * | [240, 240] | 240 | [240, 240] |
12007 * ---------------------------------------------------------------|
12008 * When Video stream is absent in configure_streams,
12009 * preview fps = sensor_fps / batchsize
12010 * Eg: for 240fps at batchSize 4, preview = 60fps
12011 * for 120fps at batchSize 4, preview = 30fps
12012 *
12013 * When video stream is present in configure_streams, preview fps is as per
12014 * the ratio of preview buffers to video buffers requested in process
12015 * capture request
12016 */
12017 mBatchSize = 0;
12018 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12019 fps_range.min_fps = fps_range.video_max_fps;
12020 fps_range.video_min_fps = fps_range.video_max_fps;
12021 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12022 fps_range.max_fps);
12023 if (NAME_NOT_FOUND != val) {
12024 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12026 return BAD_VALUE;
12027 }
12028
12029 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12030 /* If batchmode is currently in progress and the fps changes,
12031 * set the flag to restart the sensor */
12032 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12033 (mHFRVideoFps != fps_range.max_fps)) {
12034 mNeedSensorRestart = true;
12035 }
12036 mHFRVideoFps = fps_range.max_fps;
12037 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12038 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12039 mBatchSize = MAX_HFR_BATCH_SIZE;
12040 }
12041 }
12042 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12043
12044 }
12045 } else {
12046 /* HFR mode is session param in backend/ISP. This should be reset when
12047 * in non-HFR mode */
12048 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12050 return BAD_VALUE;
12051 }
12052 }
12053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12054 return BAD_VALUE;
12055 }
12056 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12057 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12058 return rc;
12059}
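/*
 * Illustrative HFR example (assumes PREVIEW_FPS_FOR_HFR == 30 and a
 * sufficiently large MAX_HFR_BATCH_SIZE; the actual constants come from the
 * HAL build):
 *   CONSTRAINED_HIGH_SPEED mode with aeTargetFpsRange = [240, 240]
 *     -> sensor/video fps forced to [240, 240], CAM_INTF_PARM_HFR set via
 *        HFR_MODE_MAP, mHFRVideoFps = 240, mBatchSize = 240 / 30 = 8
 *   Any other operating mode
 *     -> CAM_INTF_PARM_HFR reset to CAM_HFR_MODE_OFF and mBatchSize stays 0.
 */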
12060
12061/*===========================================================================
12062 * FUNCTION : translateToHalMetadata
12063 *
12064 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12065 *
12066 *
12067 * PARAMETERS :
12068 * @request : request sent from framework
12069 *
12070 *
12071 * RETURN : success: NO_ERROR
12072 * failure:
12073 *==========================================================================*/
12074int QCamera3HardwareInterface::translateToHalMetadata
12075 (const camera3_capture_request_t *request,
12076 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012077 uint32_t snapshotStreamId) {
12078 if (request == nullptr || hal_metadata == nullptr) {
12079 return BAD_VALUE;
12080 }
12081
12082 int64_t minFrameDuration = getMinFrameDuration(request);
12083
12084 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12085 minFrameDuration);
12086}
12087
12088int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12089 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12090 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12091
Thierry Strudel3d639192016-09-09 11:52:26 -070012092 int rc = 0;
12093 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012094 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012095
12096 /* Do not change the order of the following list unless you know what you are
12097 * doing.
12098 * The order is laid out in such a way that parameters in the front of the table
12099 * may be used to override the parameters later in the table. Examples are:
12100 * 1. META_MODE should precede AEC/AWB/AF MODE
12101 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12102 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12103 * 4. Any mode should precede its corresponding settings
12104 */
12105 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12106 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12108 rc = BAD_VALUE;
12109 }
12110 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12111 if (rc != NO_ERROR) {
12112 LOGE("extractSceneMode failed");
12113 }
12114 }
12115
12116 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12117 uint8_t fwk_aeMode =
12118 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12119 uint8_t aeMode;
12120 int32_t redeye;
12121
12122 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12123 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012124 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12125 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012126 } else {
12127 aeMode = CAM_AE_MODE_ON;
12128 }
12129 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12130 redeye = 1;
12131 } else {
12132 redeye = 0;
12133 }
12134
12135 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12136 fwk_aeMode);
12137 if (NAME_NOT_FOUND != val) {
12138 int32_t flashMode = (int32_t)val;
12139 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12140 }
12141
12142 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12144 rc = BAD_VALUE;
12145 }
12146 }
12147
12148 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12149 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12150 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12151 fwk_whiteLevel);
12152 if (NAME_NOT_FOUND != val) {
12153 uint8_t whiteLevel = (uint8_t)val;
12154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12155 rc = BAD_VALUE;
12156 }
12157 }
12158 }
12159
12160 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12161 uint8_t fwk_cacMode =
12162 frame_settings.find(
12163 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12164 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12165 fwk_cacMode);
12166 if (NAME_NOT_FOUND != val) {
12167 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12168 bool entryAvailable = FALSE;
12169 // Check whether Frameworks set CAC mode is supported in device or not
12170 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12171 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12172 entryAvailable = TRUE;
12173 break;
12174 }
12175 }
12176 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12177 // If entry not found then set the device supported mode instead of frameworks mode i.e,
12178 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12179 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12180 if (entryAvailable == FALSE) {
12181 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12182 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12183 } else {
12184 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12185 // High is not supported, so set FAST since the spec says the underlying
12186 // device implementation can be the same for both modes.
12187 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12188 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12189 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12190 // in order to avoid the fps drop due to high quality
12191 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12192 } else {
12193 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12194 }
12195 }
12196 }
12197 LOGD("Final cacMode is %d", cacMode);
12198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12199 rc = BAD_VALUE;
12200 }
12201 } else {
12202 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12203 }
12204 }
12205
Jason Lee84ae9972017-02-24 13:24:24 -080012206 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012207 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012208 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012209 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012210 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12211 fwk_focusMode);
12212 if (NAME_NOT_FOUND != val) {
12213 uint8_t focusMode = (uint8_t)val;
12214 LOGD("set focus mode %d", focusMode);
12215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12216 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12217 rc = BAD_VALUE;
12218 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012219 }
12220 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012221 } else {
12222 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12223 LOGE("Focus forced to infinity %d", focusMode);
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12225 rc = BAD_VALUE;
12226 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012227 }
12228
Jason Lee84ae9972017-02-24 13:24:24 -080012229 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12230 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012231 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12233 focalDistance)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237
12238 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12239 uint8_t fwk_antibandingMode =
12240 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12241 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12242 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12243 if (NAME_NOT_FOUND != val) {
12244 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012245 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12246 if (m60HzZone) {
12247 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12248 } else {
12249 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12250 }
12251 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012252 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12253 hal_antibandingMode)) {
12254 rc = BAD_VALUE;
12255 }
12256 }
12257 }
12258
12259 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12260 int32_t expCompensation = frame_settings.find(
12261 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12262 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12263 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12264 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12265 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012266 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12268 expCompensation)) {
12269 rc = BAD_VALUE;
12270 }
12271 }
12272
12273 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12274 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12276 rc = BAD_VALUE;
12277 }
12278 }
12279 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12280 rc = setHalFpsRange(frame_settings, hal_metadata);
12281 if (rc != NO_ERROR) {
12282 LOGE("setHalFpsRange failed");
12283 }
12284 }
12285
12286 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12287 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12289 rc = BAD_VALUE;
12290 }
12291 }
12292
12293 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12294 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12295 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12296 fwk_effectMode);
12297 if (NAME_NOT_FOUND != val) {
12298 uint8_t effectMode = (uint8_t)val;
12299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12300 rc = BAD_VALUE;
12301 }
12302 }
12303 }
12304
12305 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12306 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12308 colorCorrectMode)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312
12313 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12314 cam_color_correct_gains_t colorCorrectGains;
12315 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12316 colorCorrectGains.gains[i] =
12317 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12318 }
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12320 colorCorrectGains)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324
12325 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12326 cam_color_correct_matrix_t colorCorrectTransform;
12327 cam_rational_type_t transform_elem;
12328 size_t num = 0;
12329 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12330 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12331 transform_elem.numerator =
12332 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12333 transform_elem.denominator =
12334 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12335 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12336 num++;
12337 }
12338 }
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12340 colorCorrectTransform)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344
12345 cam_trigger_t aecTrigger;
12346 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12347 aecTrigger.trigger_id = -1;
12348 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12349 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12350 aecTrigger.trigger =
12351 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12352 aecTrigger.trigger_id =
12353 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12355 aecTrigger)) {
12356 rc = BAD_VALUE;
12357 }
12358 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12359 aecTrigger.trigger, aecTrigger.trigger_id);
12360 }
12361
12362 /*af_trigger must come with a trigger id*/
12363 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12364 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12365 cam_trigger_t af_trigger;
12366 af_trigger.trigger =
12367 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12368 af_trigger.trigger_id =
12369 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12371 rc = BAD_VALUE;
12372 }
12373 LOGD("AfTrigger: %d AfTriggerID: %d",
12374 af_trigger.trigger, af_trigger.trigger_id);
12375 }
12376
12377 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12378 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12380 rc = BAD_VALUE;
12381 }
12382 }
12383 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12384 cam_edge_application_t edge_application;
12385 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012386
Thierry Strudel3d639192016-09-09 11:52:26 -070012387 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12388 edge_application.sharpness = 0;
12389 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012390 edge_application.sharpness =
12391 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12392 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12393 int32_t sharpness =
12394 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12395 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12396 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12397 LOGD("Setting edge mode sharpness %d", sharpness);
12398 edge_application.sharpness = sharpness;
12399 }
12400 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012401 }
12402 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12403 rc = BAD_VALUE;
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12408 int32_t respectFlashMode = 1;
12409 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12410 uint8_t fwk_aeMode =
12411 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012412 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12413 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12414 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012415 respectFlashMode = 0;
12416 LOGH("AE Mode controls flash, ignore android.flash.mode");
12417 }
12418 }
12419 if (respectFlashMode) {
12420 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12421 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12422 LOGH("flash mode after mapping %d", val);
12423 // To check: CAM_INTF_META_FLASH_MODE usage
12424 if (NAME_NOT_FOUND != val) {
12425 uint8_t flashMode = (uint8_t)val;
12426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430 }
12431 }
12432
12433 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12434 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12436 rc = BAD_VALUE;
12437 }
12438 }
12439
12440 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12441 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12442 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12443 flashFiringTime)) {
12444 rc = BAD_VALUE;
12445 }
12446 }
12447
12448 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12449 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12450 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12451 hotPixelMode)) {
12452 rc = BAD_VALUE;
12453 }
12454 }
12455
12456 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12457 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12459 lensAperture)) {
12460 rc = BAD_VALUE;
12461 }
12462 }
12463
12464 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12465 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12466 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12467 filterDensity)) {
12468 rc = BAD_VALUE;
12469 }
12470 }
12471
12472 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12473 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12474 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12475 focalLength)) {
12476 rc = BAD_VALUE;
12477 }
12478 }
12479
12480 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12481 uint8_t optStabMode =
12482 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12484 optStabMode)) {
12485 rc = BAD_VALUE;
12486 }
12487 }
12488
12489 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12490 uint8_t videoStabMode =
12491 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12492 LOGD("videoStabMode from APP = %d", videoStabMode);
12493 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12494 videoStabMode)) {
12495 rc = BAD_VALUE;
12496 }
12497 }
12498
12499
12500 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12501 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12503 noiseRedMode)) {
12504 rc = BAD_VALUE;
12505 }
12506 }
12507
12508 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12509 float reprocessEffectiveExposureFactor =
12510 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12512 reprocessEffectiveExposureFactor)) {
12513 rc = BAD_VALUE;
12514 }
12515 }
12516
12517 cam_crop_region_t scalerCropRegion;
12518 bool scalerCropSet = false;
12519 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12520 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12521 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12522 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12523 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12524
12525 // Map coordinate system from active array to sensor output.
12526 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12527 scalerCropRegion.width, scalerCropRegion.height);
12528
12529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12530 scalerCropRegion)) {
12531 rc = BAD_VALUE;
12532 }
12533 scalerCropSet = true;
12534 }
12535
12536 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12537 int64_t sensorExpTime =
12538 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12539 LOGD("setting sensorExpTime %lld", sensorExpTime);
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12541 sensorExpTime)) {
12542 rc = BAD_VALUE;
12543 }
12544 }
12545
12546 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12547 int64_t sensorFrameDuration =
12548 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012549 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12550 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12551 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12552 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12554 sensorFrameDuration)) {
12555 rc = BAD_VALUE;
12556 }
12557 }
12558
12559 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12560 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12561 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12562 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12563 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12564 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12565 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12567 sensorSensitivity)) {
12568 rc = BAD_VALUE;
12569 }
12570 }
12571
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012572#ifndef USE_HAL_3_3
12573 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12574 int32_t ispSensitivity =
12575 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12576 if (ispSensitivity <
12577 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12578 ispSensitivity =
12579 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12580 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12581 }
12582 if (ispSensitivity >
12583 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12584 ispSensitivity =
12585 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12586 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12587 }
12588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12589 ispSensitivity)) {
12590 rc = BAD_VALUE;
12591 }
12592 }
12593#endif
12594
Thierry Strudel3d639192016-09-09 11:52:26 -070012595 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12596 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
12602 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12603 uint8_t fwk_facedetectMode =
12604 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12605
12606 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12607 fwk_facedetectMode);
12608
12609 if (NAME_NOT_FOUND != val) {
12610 uint8_t facedetectMode = (uint8_t)val;
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12612 facedetectMode)) {
12613 rc = BAD_VALUE;
12614 }
12615 }
12616 }
12617
Thierry Strudel54dc9782017-02-15 12:12:10 -080012618 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012619 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012620 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12622 histogramMode)) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
12627 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12628 uint8_t sharpnessMapMode =
12629 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12631 sharpnessMapMode)) {
12632 rc = BAD_VALUE;
12633 }
12634 }
12635
12636 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12637 uint8_t tonemapMode =
12638 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12640 rc = BAD_VALUE;
12641 }
12642 }
12643 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12644 /*All tonemap channels will have the same number of points*/
12645 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12646 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12647 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12648 cam_rgb_tonemap_curves tonemapCurves;
12649 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12650 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12651 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12652 tonemapCurves.tonemap_points_cnt,
12653 CAM_MAX_TONEMAP_CURVE_SIZE);
12654 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12655 }
12656
12657 /* ch0 = G*/
12658 size_t point = 0;
12659 cam_tonemap_curve_t tonemapCurveGreen;
12660 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12661 for (size_t j = 0; j < 2; j++) {
12662 tonemapCurveGreen.tonemap_points[i][j] =
12663 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12664 point++;
12665 }
12666 }
12667 tonemapCurves.curves[0] = tonemapCurveGreen;
12668
12669 /* ch 1 = B */
12670 point = 0;
12671 cam_tonemap_curve_t tonemapCurveBlue;
12672 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12673 for (size_t j = 0; j < 2; j++) {
12674 tonemapCurveBlue.tonemap_points[i][j] =
12675 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12676 point++;
12677 }
12678 }
12679 tonemapCurves.curves[1] = tonemapCurveBlue;
12680
12681 /* ch 2 = R */
12682 point = 0;
12683 cam_tonemap_curve_t tonemapCurveRed;
12684 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12685 for (size_t j = 0; j < 2; j++) {
12686 tonemapCurveRed.tonemap_points[i][j] =
12687 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12688 point++;
12689 }
12690 }
12691 tonemapCurves.curves[2] = tonemapCurveRed;
12692
12693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12694 tonemapCurves)) {
12695 rc = BAD_VALUE;
12696 }
12697 }
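    /* Example: a linear curve supplied as
     * ANDROID_TONEMAP_CURVE_GREEN = {0.0, 0.0, 1.0, 1.0} has count == 4, i.e.
     * tonemap_points_cnt == 2 points of (Pin, Pout) pairs; the BLUE and RED
     * channels are expected to carry the same number of points. */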
12698
12699 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12700 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12701 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12702 captureIntent)) {
12703 rc = BAD_VALUE;
12704 }
12705 }
12706
12707 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12708 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12709 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12710 blackLevelLock)) {
12711 rc = BAD_VALUE;
12712 }
12713 }
12714
12715 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12716 uint8_t lensShadingMapMode =
12717 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12718 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12719 lensShadingMapMode)) {
12720 rc = BAD_VALUE;
12721 }
12722 }
12723
12724 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12725 cam_area_t roi;
12726 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012727 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012728
12729 // Map coordinate system from active array to sensor output.
12730 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12731 roi.rect.height);
12732
12733 if (scalerCropSet) {
12734 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12735 }
12736 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12737 rc = BAD_VALUE;
12738 }
12739 }
12740
12741 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12742 cam_area_t roi;
12743 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012744 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012745
12746 // Map coordinate system from active array to sensor output.
12747 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12748 roi.rect.height);
12749
12750 if (scalerCropSet) {
12751 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12752 }
12753 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12754 rc = BAD_VALUE;
12755 }
12756 }
12757
12758 // CDS for non-HFR non-video mode
12759 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12760 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12761 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12762 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12763 LOGE("Invalid CDS mode %d!", *fwk_cds);
12764 } else {
12765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12766 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770 }
12771
Thierry Strudel04e026f2016-10-10 11:27:36 -070012772 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012773 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012774 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012775 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12776 }
12777 if (m_bVideoHdrEnabled)
12778 vhdr = CAM_VIDEO_HDR_MODE_ON;
12779
Thierry Strudel54dc9782017-02-15 12:12:10 -080012780 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12781
12782 if(vhdr != curr_hdr_state)
12783 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12784
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012785 rc = setVideoHdrMode(mParameters, vhdr);
12786 if (rc != NO_ERROR) {
12787 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012788 }
12789
12790 //IR
12791 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12792 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12793 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012794 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12795 uint8_t isIRon = 0;
12796
12797        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012798 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12799 LOGE("Invalid IR mode %d!", fwk_ir);
12800 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012801 if(isIRon != curr_ir_state )
12802 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12803
Thierry Strudel04e026f2016-10-10 11:27:36 -070012804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12805 CAM_INTF_META_IR_MODE, fwk_ir)) {
12806 rc = BAD_VALUE;
12807 }
12808 }
12809 }
12810
Thierry Strudel54dc9782017-02-15 12:12:10 -080012811 //Binning Correction Mode
12812 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12813 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12814 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12815 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12816 || (0 > fwk_binning_correction)) {
12817 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12818 } else {
12819 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12820 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12821 rc = BAD_VALUE;
12822 }
12823 }
12824 }
12825
Thierry Strudel269c81a2016-10-12 12:13:59 -070012826 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12827 float aec_speed;
12828 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12829 LOGD("AEC Speed :%f", aec_speed);
12830 if ( aec_speed < 0 ) {
12831            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12832 } else {
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12834 aec_speed)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838 }
12839
12840 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12841 float awb_speed;
12842 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12843 LOGD("AWB Speed :%f", awb_speed);
12844 if ( awb_speed < 0 ) {
12845            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12846 } else {
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12848 awb_speed)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852 }
12853
Thierry Strudel3d639192016-09-09 11:52:26 -070012854 // TNR
12855 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12856 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12857 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012858 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012859 cam_denoise_param_t tnr;
12860 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12861 tnr.process_plates =
12862 (cam_denoise_process_type_t)frame_settings.find(
12863 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12864 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012865
12866 if(b_TnrRequested != curr_tnr_state)
12867 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12868
Thierry Strudel3d639192016-09-09 11:52:26 -070012869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873
Thierry Strudel54dc9782017-02-15 12:12:10 -080012874 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012875 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012876 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12878 *exposure_metering_mode)) {
12879 rc = BAD_VALUE;
12880 }
12881 }
12882
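    // Sensor test pattern: ANDROID_SENSOR_TEST_PATTERN_DATA carries four per-channel values
    // (index 0 = R, index 3 = B); which of index 1/2 maps to Gr vs. Gb depends on the
    // sensor's color filter arrangement, handled by the switch below.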
Thierry Strudel3d639192016-09-09 11:52:26 -070012883 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12884 int32_t fwk_testPatternMode =
12885 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12886 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12887 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12888
12889 if (NAME_NOT_FOUND != testPatternMode) {
12890 cam_test_pattern_data_t testPatternData;
12891 memset(&testPatternData, 0, sizeof(testPatternData));
12892 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12893 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12894 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12895 int32_t *fwk_testPatternData =
12896 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12897 testPatternData.r = fwk_testPatternData[0];
12898 testPatternData.b = fwk_testPatternData[3];
12899 switch (gCamCapability[mCameraId]->color_arrangement) {
12900 case CAM_FILTER_ARRANGEMENT_RGGB:
12901 case CAM_FILTER_ARRANGEMENT_GRBG:
12902 testPatternData.gr = fwk_testPatternData[1];
12903 testPatternData.gb = fwk_testPatternData[2];
12904 break;
12905 case CAM_FILTER_ARRANGEMENT_GBRG:
12906 case CAM_FILTER_ARRANGEMENT_BGGR:
12907 testPatternData.gr = fwk_testPatternData[2];
12908 testPatternData.gb = fwk_testPatternData[1];
12909 break;
12910 default:
12911 LOGE("color arrangement %d is not supported",
12912 gCamCapability[mCameraId]->color_arrangement);
12913 break;
12914 }
12915 }
12916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12917 testPatternData)) {
12918 rc = BAD_VALUE;
12919 }
12920 } else {
12921 LOGE("Invalid framework sensor test pattern mode %d",
12922 fwk_testPatternMode);
12923 }
12924 }
12925
12926 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12927 size_t count = 0;
12928 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12929 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12930 gps_coords.data.d, gps_coords.count, count);
12931 if (gps_coords.count != count) {
12932 rc = BAD_VALUE;
12933 }
12934 }
12935
12936 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12937 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12938 size_t count = 0;
12939 const char *gps_methods_src = (const char *)
12940 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12941 memset(gps_methods, '\0', sizeof(gps_methods));
12942 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12943 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12944 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12945 if (GPS_PROCESSING_METHOD_SIZE != count) {
12946 rc = BAD_VALUE;
12947 }
12948 }
12949
12950 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12951 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12953 gps_timestamp)) {
12954 rc = BAD_VALUE;
12955 }
12956 }
12957
12958 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12959 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12960 cam_rotation_info_t rotation_info;
12961 if (orientation == 0) {
12962 rotation_info.rotation = ROTATE_0;
12963 } else if (orientation == 90) {
12964 rotation_info.rotation = ROTATE_90;
12965 } else if (orientation == 180) {
12966 rotation_info.rotation = ROTATE_180;
12967 } else if (orientation == 270) {
12968 rotation_info.rotation = ROTATE_270;
12969        } else {
            // Defensive default: ANDROID_JPEG_ORIENTATION should only be 0, 90, 180 or 270;
            // avoid leaving rotation_info.rotation uninitialized for an unexpected value.
            rotation_info.rotation = ROTATE_0;
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012970 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012971 rotation_info.streamId = snapshotStreamId;
12972 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12974 rc = BAD_VALUE;
12975 }
12976 }
12977
12978 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12979 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12980 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12981 rc = BAD_VALUE;
12982 }
12983 }
12984
12985 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12986 uint32_t thumb_quality = (uint32_t)
12987 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12989 thumb_quality)) {
12990 rc = BAD_VALUE;
12991 }
12992 }
12993
12994 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12995 cam_dimension_t dim;
12996 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12997 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12999 rc = BAD_VALUE;
13000 }
13001 }
13002
13003 // Internal metadata
13004 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13005 size_t count = 0;
13006 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13007 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13008 privatedata.data.i32, privatedata.count, count);
13009 if (privatedata.count != count) {
13010 rc = BAD_VALUE;
13011 }
13012 }
13013
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013014 // ISO/Exposure Priority
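    // When both vendor tags are present and the selected priority is CAM_ISO_PRIORITY or
    // CAM_EXP_PRIORITY, the manual value is forwarded as ISO or exposure time and ZSL is
    // enabled; requests without these tags take the else branch below, which disables ZSL.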
13015 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13016 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13017 cam_priority_mode_t mode =
13018 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13019 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13020 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13021 use_iso_exp_pty.previewOnly = FALSE;
13022 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13023 use_iso_exp_pty.value = *ptr;
13024
13025 if(CAM_ISO_PRIORITY == mode) {
13026 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13027 use_iso_exp_pty)) {
13028 rc = BAD_VALUE;
13029 }
13030 }
13031 else {
13032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13033 use_iso_exp_pty)) {
13034 rc = BAD_VALUE;
13035 }
13036 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013037
13038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13039 rc = BAD_VALUE;
13040 }
13041 }
13042 } else {
13043 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13044 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013045 }
13046 }
13047
13048 // Saturation
13049 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13050 int32_t* use_saturation =
13051 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13053 rc = BAD_VALUE;
13054 }
13055 }
13056
Thierry Strudel3d639192016-09-09 11:52:26 -070013057 // EV step
13058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13059 gCamCapability[mCameraId]->exp_compensation_step)) {
13060 rc = BAD_VALUE;
13061 }
13062
13063 // CDS info
13064 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13065 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13066 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13067
13068 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13069 CAM_INTF_META_CDS_DATA, *cdsData)) {
13070 rc = BAD_VALUE;
13071 }
13072 }
13073
Shuzhen Wang19463d72016-03-08 11:09:52 -080013074 // Hybrid AE
13075 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13076 uint8_t *hybrid_ae = (uint8_t *)
13077 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13078
13079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13080 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13081 rc = BAD_VALUE;
13082 }
13083 }
13084
Shuzhen Wang14415f52016-11-16 18:26:18 -080013085 // Histogram
13086 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13087 uint8_t histogramMode =
13088 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13090 histogramMode)) {
13091 rc = BAD_VALUE;
13092 }
13093 }
13094
13095 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13096 int32_t histogramBins =
13097 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13099 histogramBins)) {
13100 rc = BAD_VALUE;
13101 }
13102 }
13103
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013104 // Tracking AF
13105 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13106 uint8_t trackingAfTrigger =
13107 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13109 trackingAfTrigger)) {
13110 rc = BAD_VALUE;
13111 }
13112 }
13113
Thierry Strudel3d639192016-09-09 11:52:26 -070013114 return rc;
13115}
13116
13117/*===========================================================================
13118 * FUNCTION : captureResultCb
13119 *
13120 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13121 *
13122 * PARAMETERS :
13123 * @frame : frame information from mm-camera-interface
13124 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13125 * @userdata: userdata
13126 *
13127 * RETURN : NONE
13128 *==========================================================================*/
13129void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13130 camera3_stream_buffer_t *buffer,
13131 uint32_t frame_number, bool isInputBuffer, void *userdata)
13132{
13133 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13134 if (hw == NULL) {
13135 LOGE("Invalid hw %p", hw);
13136 return;
13137 }
13138
13139 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13140 return;
13141}
13142
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013143/*===========================================================================
13144 * FUNCTION : setBufferErrorStatus
13145 *
13146 * DESCRIPTION: Callback handler for channels to report any buffer errors
13147 *
13148 * PARAMETERS :
13149 * @ch : Channel on which buffer error is reported from
13150 * @frame_number : frame number on which buffer error is reported on
13151 * @buffer_status : buffer error status
13152 * @userdata: userdata
13153 *
13154 * RETURN : NONE
13155 *==========================================================================*/
13156void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13157 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13158{
13159 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13160 if (hw == NULL) {
13161 LOGE("Invalid hw %p", hw);
13162 return;
13163 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013164
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013165 hw->setBufferErrorStatus(ch, frame_number, err);
13166 return;
13167}
13168
13169void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13170 uint32_t frameNumber, camera3_buffer_status_t err)
13171{
13172 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13173 pthread_mutex_lock(&mMutex);
13174
13175 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13176 if (req.frame_number != frameNumber)
13177 continue;
13178 for (auto& k : req.mPendingBufferList) {
13179 if(k.stream->priv == ch) {
13180 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13181 }
13182 }
13183 }
13184
13185 pthread_mutex_unlock(&mMutex);
13186 return;
13187}
Thierry Strudel3d639192016-09-09 11:52:26 -070013188/*===========================================================================
13189 * FUNCTION : initialize
13190 *
13191 * DESCRIPTION: Pass framework callback pointers to HAL
13192 *
13193 * PARAMETERS :
13194 *
13195 *
13196 * RETURN : Success : 0
13197 * Failure: -ENODEV
13198 *==========================================================================*/
13199
13200int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13201 const camera3_callback_ops_t *callback_ops)
13202{
13203 LOGD("E");
13204 QCamera3HardwareInterface *hw =
13205 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13206 if (!hw) {
13207 LOGE("NULL camera device");
13208 return -ENODEV;
13209 }
13210
13211 int rc = hw->initialize(callback_ops);
13212 LOGD("X");
13213 return rc;
13214}
13215
13216/*===========================================================================
13217 * FUNCTION : configure_streams
13218 *
13219 * DESCRIPTION: Entry point to configure the set of output streams for the camera device
13220 *
13221 * PARAMETERS :
13222 *
13223 *
13224 * RETURN : Success: 0
13225 * Failure: -EINVAL (if stream configuration is invalid)
13226 * -ENODEV (fatal error)
13227 *==========================================================================*/
13228
13229int QCamera3HardwareInterface::configure_streams(
13230 const struct camera3_device *device,
13231 camera3_stream_configuration_t *stream_list)
13232{
13233 LOGD("E");
13234 QCamera3HardwareInterface *hw =
13235 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13236 if (!hw) {
13237 LOGE("NULL camera device");
13238 return -ENODEV;
13239 }
13240 int rc = hw->configureStreams(stream_list);
13241 LOGD("X");
13242 return rc;
13243}
13244
13245/*===========================================================================
13246 * FUNCTION : construct_default_request_settings
13247 *
13248 * DESCRIPTION: Configure a settings buffer to meet the required use case
13249 *
13250 * PARAMETERS :
13251 *
13252 *
13253 * RETURN : Success: Return valid metadata
13254 * Failure: Return NULL
13255 *==========================================================================*/
13256const camera_metadata_t* QCamera3HardwareInterface::
13257 construct_default_request_settings(const struct camera3_device *device,
13258 int type)
13259{
13260
13261 LOGD("E");
13262 camera_metadata_t* fwk_metadata = NULL;
13263 QCamera3HardwareInterface *hw =
13264 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13265 if (!hw) {
13266 LOGE("NULL camera device");
13267 return NULL;
13268 }
13269
13270 fwk_metadata = hw->translateCapabilityToMetadata(type);
13271
13272 LOGD("X");
13273 return fwk_metadata;
13274}
13275
13276/*===========================================================================
13277 * FUNCTION : process_capture_request
13278 *
13279 * DESCRIPTION: Entry point to queue a new capture request to the HAL
13280 *
13281 * PARAMETERS :
13282 *
13283 *
13284 * RETURN :
13285 *==========================================================================*/
13286int QCamera3HardwareInterface::process_capture_request(
13287 const struct camera3_device *device,
13288 camera3_capture_request_t *request)
13289{
13290 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013291 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013292 QCamera3HardwareInterface *hw =
13293 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13294 if (!hw) {
13295 LOGE("NULL camera device");
13296 return -EINVAL;
13297 }
13298
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013299 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013300 LOGD("X");
13301 return rc;
13302}
13303
13304/*===========================================================================
13305 * FUNCTION : dump
13306 *
13307 * DESCRIPTION: Dump HAL state information to the given file descriptor
13308 *
13309 * PARAMETERS :
13310 *
13311 *
13312 * RETURN :
13313 *==========================================================================*/
13314
13315void QCamera3HardwareInterface::dump(
13316 const struct camera3_device *device, int fd)
13317{
13318 /* Log level property is read when "adb shell dumpsys media.camera" is
13319 called so that the log level can be controlled without restarting
13320 the media server */
13321 getLogLevel();
13322
13323 LOGD("E");
13324 QCamera3HardwareInterface *hw =
13325 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13326 if (!hw) {
13327 LOGE("NULL camera device");
13328 return;
13329 }
13330
13331 hw->dump(fd);
13332 LOGD("X");
13333 return;
13334}
13335
13336/*===========================================================================
13337 * FUNCTION : flush
13338 *
13339 * DESCRIPTION: Flush all in-flight captures; only meaningful in the STARTED state
13340 *
13341 * PARAMETERS :
13342 *
13343 *
13344 * RETURN :
13345 *==========================================================================*/
13346
13347int QCamera3HardwareInterface::flush(
13348 const struct camera3_device *device)
13349{
13350 int rc;
13351 LOGD("E");
13352 QCamera3HardwareInterface *hw =
13353 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13354 if (!hw) {
13355 LOGE("NULL camera device");
13356 return -EINVAL;
13357 }
13358
13359 pthread_mutex_lock(&hw->mMutex);
13360 // Validate current state
13361 switch (hw->mState) {
13362 case STARTED:
13363 /* valid state */
13364 break;
13365
13366 case ERROR:
13367 pthread_mutex_unlock(&hw->mMutex);
13368 hw->handleCameraDeviceError();
13369 return -ENODEV;
13370
13371 default:
13372 LOGI("Flush returned during state %d", hw->mState);
13373 pthread_mutex_unlock(&hw->mMutex);
13374 return 0;
13375 }
13376 pthread_mutex_unlock(&hw->mMutex);
13377
13378 rc = hw->flush(true /* restart channels */ );
13379 LOGD("X");
13380 return rc;
13381}
13382
13383/*===========================================================================
13384 * FUNCTION : close_camera_device
13385 *
13386 * DESCRIPTION: Close the camera device and release the HAL instance
13387 *
13388 * PARAMETERS :
13389 *
13390 *
13391 * RETURN :
13392 *==========================================================================*/
13393int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13394{
13395 int ret = NO_ERROR;
13396 QCamera3HardwareInterface *hw =
13397 reinterpret_cast<QCamera3HardwareInterface *>(
13398 reinterpret_cast<camera3_device_t *>(device)->priv);
13399 if (!hw) {
13400 LOGE("NULL camera device");
13401 return BAD_VALUE;
13402 }
13403
13404 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13405 delete hw;
13406 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013407 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013408 return ret;
13409}
13410
13411/*===========================================================================
13412 * FUNCTION : getWaveletDenoiseProcessPlate
13413 *
13414 * DESCRIPTION: query wavelet denoise process plate
13415 *
13416 * PARAMETERS : None
13417 *
13418 * RETURN : WNR process plate value
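 * NOTE : selection is driven by the persist.denoise.process.plates property;
 *        e.g. "adb shell setprop persist.denoise.process.plates 1" selects CBCR_ONLY
 *        (see the switch below for the full mapping).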
13419 *==========================================================================*/
13420cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13421{
13422 char prop[PROPERTY_VALUE_MAX];
13423 memset(prop, 0, sizeof(prop));
13424 property_get("persist.denoise.process.plates", prop, "0");
13425 int processPlate = atoi(prop);
13426 switch(processPlate) {
13427 case 0:
13428 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13429 case 1:
13430 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13431 case 2:
13432 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13433 case 3:
13434 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13435 default:
13436 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13437 }
13438}
13439
13440
13441/*===========================================================================
13442 * FUNCTION : getTemporalDenoiseProcessPlate
13443 *
13444 * DESCRIPTION: query temporal denoise process plate
13445 *
13446 * PARAMETERS : None
13447 *
13448 * RETURN : TNR process plate value
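 * NOTE : selection is driven by the persist.tnr.process.plates property, mirroring
 *        the wavelet denoise plate selection above.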
13449 *==========================================================================*/
13450cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13451{
13452 char prop[PROPERTY_VALUE_MAX];
13453 memset(prop, 0, sizeof(prop));
13454 property_get("persist.tnr.process.plates", prop, "0");
13455 int processPlate = atoi(prop);
13456 switch(processPlate) {
13457 case 0:
13458 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13459 case 1:
13460 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13461 case 2:
13462 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13463 case 3:
13464 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13465 default:
13466 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13467 }
13468}
13469
13470
13471/*===========================================================================
13472 * FUNCTION : extractSceneMode
13473 *
13474 * DESCRIPTION: Extract scene mode from frameworks set metadata
13475 *
13476 * PARAMETERS :
13477 * @frame_settings: CameraMetadata reference
13478 * @metaMode: ANDROID_CONTROL_MODE value from the request settings
13479 * @hal_metadata: hal metadata structure
13480 *
13481 * RETURN : NO_ERROR on success, error code on failure
13482 *==========================================================================*/
13483int32_t QCamera3HardwareInterface::extractSceneMode(
13484 const CameraMetadata &frame_settings, uint8_t metaMode,
13485 metadata_buffer_t *hal_metadata)
13486{
13487 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013488 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13489
13490 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13491 LOGD("Ignoring control mode OFF_KEEP_STATE");
13492 return NO_ERROR;
13493 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013494
13495 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13496 camera_metadata_ro_entry entry =
13497 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13498 if (0 == entry.count)
13499 return rc;
13500
13501 uint8_t fwk_sceneMode = entry.data.u8[0];
13502
13503 int val = lookupHalName(SCENE_MODES_MAP,
13504 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13505 fwk_sceneMode);
13506 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013507 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013508 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013509 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013510 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013511
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013512 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13513 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13514 }
13515
13516 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13517        if (sceneMode == CAM_SCENE_MODE_HDR) { // compare in the HAL (CAM_*) domain, consistent with the check above
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013518 cam_hdr_param_t hdr_params;
13519 hdr_params.hdr_enable = 1;
13520 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13521 hdr_params.hdr_need_1x = false;
13522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13523 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13524 rc = BAD_VALUE;
13525 }
13526 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013527
Thierry Strudel3d639192016-09-09 11:52:26 -070013528 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13529 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13530 rc = BAD_VALUE;
13531 }
13532 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013533
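    // When HDR snapshot is forced, request multi-frame HDR bracketing regardless of the
    // scene mode selected by the framework.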
13534 if (mForceHdrSnapshot) {
13535 cam_hdr_param_t hdr_params;
13536 hdr_params.hdr_enable = 1;
13537 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13538 hdr_params.hdr_need_1x = false;
13539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13540 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13541 rc = BAD_VALUE;
13542 }
13543 }
13544
Thierry Strudel3d639192016-09-09 11:52:26 -070013545 return rc;
13546}
13547
13548/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013549 * FUNCTION : setVideoHdrMode
13550 *
13551 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13552 *
13553 * PARAMETERS :
13554 * @hal_metadata: hal metadata structure
13555 * @vhdr: video HDR mode requested via the QCAMERA3_VIDEO_HDR_MODE vendor tag
13556 *
13557 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13558 *==========================================================================*/
13559int32_t QCamera3HardwareInterface::setVideoHdrMode(
13560 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13561{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013562 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13563 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13564 }
13565
13566 LOGE("Invalid Video HDR mode %d!", vhdr);
13567 return BAD_VALUE;
13568}
13569
13570/*===========================================================================
13571 * FUNCTION : setSensorHDR
13572 *
13573 * DESCRIPTION: Enable/disable sensor HDR.
13574 *
13575 * PARAMETERS :
13576 * @hal_metadata: hal metadata structure
13577 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when invoked for video HDR; m_bSensorHDREnabled is then left unchanged
13578 *
13579 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13580 *==========================================================================*/
13581int32_t QCamera3HardwareInterface::setSensorHDR(
13582 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13583{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013584 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013585 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13586
13587 if (enable) {
13588 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13589 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13590 #ifdef _LE_CAMERA_
13591 //Default to staggered HDR for IOT
13592 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13593 #else
13594 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13595 #endif
13596 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13597 }
13598
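    // The property value maps directly onto cam_sensor_hdr_type_t (the _LE_CAMERA_ default
    // of 3 corresponds to staggered HDR); modes not advertised in the capability mask are
    // rejected by the switch below.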
13599 bool isSupported = false;
13600 switch (sensor_hdr) {
13601 case CAM_SENSOR_HDR_IN_SENSOR:
13602 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13603 CAM_QCOM_FEATURE_SENSOR_HDR) {
13604 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013605 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013606 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013607 break;
13608 case CAM_SENSOR_HDR_ZIGZAG:
13609 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13610 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13611 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013612 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013613 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013614 break;
13615 case CAM_SENSOR_HDR_STAGGERED:
13616 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13617 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13618 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013619 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013620 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013621 break;
13622 case CAM_SENSOR_HDR_OFF:
13623 isSupported = true;
13624 LOGD("Turning off sensor HDR");
13625 break;
13626 default:
13627 LOGE("HDR mode %d not supported", sensor_hdr);
13628 rc = BAD_VALUE;
13629 break;
13630 }
13631
13632 if(isSupported) {
13633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13634 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13635 rc = BAD_VALUE;
13636 } else {
13637 if(!isVideoHdrEnable)
13638 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013639 }
13640 }
13641 return rc;
13642}
13643
13644/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013645 * FUNCTION : needRotationReprocess
13646 *
13647 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13648 *
13649 * PARAMETERS : none
13650 *
13651 * RETURN : true: needed
13652 * false: no need
13653 *==========================================================================*/
13654bool QCamera3HardwareInterface::needRotationReprocess()
13655{
13656 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13657        // pp has the capability to process rotation, so perform rotation via reprocess
13658 LOGH("need do reprocess for rotation");
13659 return true;
13660 }
13661
13662 return false;
13663}
13664
13665/*===========================================================================
13666 * FUNCTION : needReprocess
13667 *
13668 * DESCRIPTION: if reprocess is needed
13669 *
13670 * PARAMETERS : none
13671 *
13672 * RETURN : true: needed
13673 * false: no need
13674 *==========================================================================*/
13675bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13676{
13677 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13678 // TODO: add for ZSL HDR later
13679 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13680 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13681 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13682 return true;
13683 } else {
13684 LOGH("already post processed frame");
13685 return false;
13686 }
13687 }
13688 return needRotationReprocess();
13689}
13690
13691/*===========================================================================
13692 * FUNCTION : needJpegExifRotation
13693 *
13694 * DESCRIPTION: if rotation from jpeg is needed
13695 *
13696 * PARAMETERS : none
13697 *
13698 * RETURN : true: needed
13699 * false: no need
13700 *==========================================================================*/
13701bool QCamera3HardwareInterface::needJpegExifRotation()
13702{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013703 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013704 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13705 LOGD("Need use Jpeg EXIF Rotation");
13706 return true;
13707 }
13708 return false;
13709}
13710
13711/*===========================================================================
13712 * FUNCTION : addOfflineReprocChannel
13713 *
13714 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13715 * coming from input channel
13716 *
13717 * PARAMETERS :
13718 * @config : reprocess configuration
13719 * @inputChHandle : pointer to the input (source) channel
13720 *
13721 *
13722 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13723 *==========================================================================*/
13724QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13725 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13726{
13727 int32_t rc = NO_ERROR;
13728 QCamera3ReprocessChannel *pChannel = NULL;
13729
13730 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013731 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13732 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013733 if (NULL == pChannel) {
13734 LOGE("no mem for reprocess channel");
13735 return NULL;
13736 }
13737
13738 rc = pChannel->initialize(IS_TYPE_NONE);
13739 if (rc != NO_ERROR) {
13740 LOGE("init reprocess channel failed, ret = %d", rc);
13741 delete pChannel;
13742 return NULL;
13743 }
13744
13745 // pp feature config
13746 cam_pp_feature_config_t pp_config;
13747 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13748
13749 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13750 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13751 & CAM_QCOM_FEATURE_DSDN) {
13752 //Use CPP CDS incase h/w supports it.
13753 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13754 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13755 }
13756 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13757 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13758 }
13759
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013760 if (config.hdr_param.hdr_enable) {
13761 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13762 pp_config.hdr_param = config.hdr_param;
13763 }
13764
13765 if (mForceHdrSnapshot) {
13766 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13767 pp_config.hdr_param.hdr_enable = 1;
13768 pp_config.hdr_param.hdr_need_1x = 0;
13769 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13770 }
13771
Thierry Strudel3d639192016-09-09 11:52:26 -070013772 rc = pChannel->addReprocStreamsFromSource(pp_config,
13773 config,
13774 IS_TYPE_NONE,
13775 mMetadataChannel);
13776
13777 if (rc != NO_ERROR) {
13778 delete pChannel;
13779 return NULL;
13780 }
13781 return pChannel;
13782}
13783
13784/*===========================================================================
13785 * FUNCTION : getMobicatMask
13786 *
13787 * DESCRIPTION: returns mobicat mask
13788 *
13789 * PARAMETERS : none
13790 *
13791 * RETURN : mobicat mask
13792 *
13793 *==========================================================================*/
13794uint8_t QCamera3HardwareInterface::getMobicatMask()
13795{
13796 return m_MobicatMask;
13797}
13798
13799/*===========================================================================
13800 * FUNCTION : setMobicat
13801 *
13802 * DESCRIPTION: set Mobicat on/off.
13803 *
13804 * PARAMETERS :
13805 * @params : none
13806 *
13807 * RETURN : int32_t type of status
13808 * NO_ERROR -- success
13809 *              non-zero failure code
13810 *==========================================================================*/
13811int32_t QCamera3HardwareInterface::setMobicat()
13812{
Thierry Strudel3d639192016-09-09 11:52:26 -070013813 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013814
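    // When the mobicat mask is set, issue a reload-chromatix tune command to both the VFE
    // and the post-processing modules.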
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013815 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013816 tune_cmd_t tune_cmd;
13817 tune_cmd.type = SET_RELOAD_CHROMATIX;
13818 tune_cmd.module = MODULE_ALL;
13819 tune_cmd.value = TRUE;
13820 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13821 CAM_INTF_PARM_SET_VFE_COMMAND,
13822 tune_cmd);
13823
13824 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13825 CAM_INTF_PARM_SET_PP_COMMAND,
13826 tune_cmd);
13827 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013828
13829 return ret;
13830}
13831
13832/*===========================================================================
13833* FUNCTION : getLogLevel
13834*
13835* DESCRIPTION: Reads the log level property into a variable
13836*
13837* PARAMETERS :
13838* None
13839*
13840* RETURN :
13841* None
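* NOTE : e.g. "adb shell setprop persist.camera.hal.debug 3" raises the HAL log level;
* the effective level is the maximum of the hal and global debug properties.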
13842*==========================================================================*/
13843void QCamera3HardwareInterface::getLogLevel()
13844{
13845 char prop[PROPERTY_VALUE_MAX];
13846 uint32_t globalLogLevel = 0;
13847
13848 property_get("persist.camera.hal.debug", prop, "0");
13849 int val = atoi(prop);
13850 if (0 <= val) {
13851 gCamHal3LogLevel = (uint32_t)val;
13852 }
13853
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013854 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013855 gKpiDebugLevel = atoi(prop);
13856
13857 property_get("persist.camera.global.debug", prop, "0");
13858 val = atoi(prop);
13859 if (0 <= val) {
13860 globalLogLevel = (uint32_t)val;
13861 }
13862
13863 /* Highest log level among hal.logs and global.logs is selected */
13864 if (gCamHal3LogLevel < globalLogLevel)
13865 gCamHal3LogLevel = globalLogLevel;
13866
13867 return;
13868}
13869
13870/*===========================================================================
13871 * FUNCTION : validateStreamRotations
13872 *
13873 * DESCRIPTION: Check if the rotations requested are supported
13874 *
13875 * PARAMETERS :
13876 * @stream_list : streams to be configured
13877 *
13878 * RETURN : NO_ERROR on success
13879 * -EINVAL on failure
13880 *
13881 *==========================================================================*/
13882int QCamera3HardwareInterface::validateStreamRotations(
13883 camera3_stream_configuration_t *streamList)
13884{
13885 int rc = NO_ERROR;
13886
13887 /*
13888 * Loop through all streams requested in configuration
13889 * Check if unsupported rotations have been requested on any of them
13890 */
13891 for (size_t j = 0; j < streamList->num_streams; j++){
13892 camera3_stream_t *newStream = streamList->streams[j];
13893
Emilian Peev35ceeed2017-06-29 11:58:56 -070013894 switch(newStream->rotation) {
13895 case CAMERA3_STREAM_ROTATION_0:
13896 case CAMERA3_STREAM_ROTATION_90:
13897 case CAMERA3_STREAM_ROTATION_180:
13898 case CAMERA3_STREAM_ROTATION_270:
13899 //Expected values
13900 break;
13901 default:
13902 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13903                    " type:%d and stream format:%d", __func__,
13904 newStream->rotation, newStream->stream_type,
13905 newStream->format);
13906 return -EINVAL;
13907 }
13908
Thierry Strudel3d639192016-09-09 11:52:26 -070013909 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13910 bool isImplDef = (newStream->format ==
13911 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13912 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13913 isImplDef);
13914
13915 if (isRotated && (!isImplDef || isZsl)) {
13916 LOGE("Error: Unsupported rotation of %d requested for stream"
13917                    " type:%d and stream format:%d",
13918 newStream->rotation, newStream->stream_type,
13919 newStream->format);
13920 rc = -EINVAL;
13921 break;
13922 }
13923 }
13924
13925 return rc;
13926}
13927
13928/*===========================================================================
13929* FUNCTION : getFlashInfo
13930*
13931* DESCRIPTION: Retrieve information about whether the device has a flash.
13932*
13933* PARAMETERS :
13934* @cameraId : Camera id to query
13935* @hasFlash : Boolean indicating whether there is a flash device
13936* associated with given camera
13937* @flashNode : If a flash device exists, this will be its device node.
13938*
13939* RETURN :
13940* None
13941*==========================================================================*/
13942void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13943 bool& hasFlash,
13944 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13945{
13946 cam_capability_t* camCapability = gCamCapability[cameraId];
13947 if (NULL == camCapability) {
13948 hasFlash = false;
13949 flashNode[0] = '\0';
13950 } else {
13951 hasFlash = camCapability->flash_available;
13952 strlcpy(flashNode,
13953 (char*)camCapability->flash_dev_name,
13954 QCAMERA_MAX_FILEPATH_LENGTH);
13955 }
13956}
13957
13958/*===========================================================================
13959* FUNCTION : getEepromVersionInfo
13960*
13961* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13962*
13963* PARAMETERS : None
13964*
13965* RETURN : string describing EEPROM version
13966* "\0" if no such info available
13967*==========================================================================*/
13968const char *QCamera3HardwareInterface::getEepromVersionInfo()
13969{
13970 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13971}
13972
13973/*===========================================================================
13974* FUNCTION : getLdafCalib
13975*
13976* DESCRIPTION: Retrieve Laser AF calibration data
13977*
13978* PARAMETERS : None
13979*
13980* RETURN : Two uint32_t describing laser AF calibration data
13981* NULL if none is available.
13982*==========================================================================*/
13983const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13984{
13985 if (mLdafCalibExist) {
13986 return &mLdafCalib[0];
13987 } else {
13988 return NULL;
13989 }
13990}
13991
13992/*===========================================================================
13993 * FUNCTION : dynamicUpdateMetaStreamInfo
13994 *
13995 * DESCRIPTION: This function:
13996 * (1) stops all the channels
13997 * (2) returns error on pending requests and buffers
13998 * (3) sends metastream_info in setparams
13999 * (4) starts all channels
14000 * This is useful when sensor has to be restarted to apply any
14001 * settings such as frame rate from a different sensor mode
14002 *
14003 * PARAMETERS : None
14004 *
14005 * RETURN : NO_ERROR on success
14006 * Error codes on failure
14007 *
14008 *==========================================================================*/
14009int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14010{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014011 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014012 int rc = NO_ERROR;
14013
14014 LOGD("E");
14015
14016 rc = stopAllChannels();
14017 if (rc < 0) {
14018 LOGE("stopAllChannels failed");
14019 return rc;
14020 }
14021
14022 rc = notifyErrorForPendingRequests();
14023 if (rc < 0) {
14024 LOGE("notifyErrorForPendingRequests failed");
14025 return rc;
14026 }
14027
14028 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14029 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14030                " Format:%d",
14031 mStreamConfigInfo.type[i],
14032 mStreamConfigInfo.stream_sizes[i].width,
14033 mStreamConfigInfo.stream_sizes[i].height,
14034 mStreamConfigInfo.postprocess_mask[i],
14035 mStreamConfigInfo.format[i]);
14036 }
14037
14038 /* Send meta stream info once again so that ISP can start */
14039 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14040 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14041 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14042 mParameters);
14043 if (rc < 0) {
14044 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14045 }
14046
14047 rc = startAllChannels();
14048 if (rc < 0) {
14049 LOGE("startAllChannels failed");
14050 return rc;
14051 }
14052
14053 LOGD("X");
14054 return rc;
14055}
14056
14057/*===========================================================================
14058 * FUNCTION : stopAllChannels
14059 *
14060 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14061 *
14062 * PARAMETERS : None
14063 *
14064 * RETURN : NO_ERROR on success
14065 * Error codes on failure
14066 *
14067 *==========================================================================*/
14068int32_t QCamera3HardwareInterface::stopAllChannels()
14069{
14070 int32_t rc = NO_ERROR;
14071
14072 LOGD("Stopping all channels");
14073 // Stop the Streams/Channels
14074 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14075 it != mStreamInfo.end(); it++) {
14076 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14077 if (channel) {
14078 channel->stop();
14079 }
14080 (*it)->status = INVALID;
14081 }
14082
14083 if (mSupportChannel) {
14084 mSupportChannel->stop();
14085 }
14086 if (mAnalysisChannel) {
14087 mAnalysisChannel->stop();
14088 }
14089 if (mRawDumpChannel) {
14090 mRawDumpChannel->stop();
14091 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014092 if (mHdrPlusRawSrcChannel) {
14093 mHdrPlusRawSrcChannel->stop();
14094 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014095 if (mMetadataChannel) {
14096 /* If content of mStreamInfo is not 0, there is metadata stream */
14097 mMetadataChannel->stop();
14098 }
14099
14100 LOGD("All channels stopped");
14101 return rc;
14102}
14103
14104/*===========================================================================
14105 * FUNCTION : startAllChannels
14106 *
14107 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14108 *
14109 * PARAMETERS : None
14110 *
14111 * RETURN : NO_ERROR on success
14112 * Error codes on failure
14113 *
14114 *==========================================================================*/
14115int32_t QCamera3HardwareInterface::startAllChannels()
14116{
14117 int32_t rc = NO_ERROR;
14118
14119 LOGD("Start all channels ");
14120 // Start the Streams/Channels
14121 if (mMetadataChannel) {
14122 /* If content of mStreamInfo is not 0, there is metadata stream */
14123 rc = mMetadataChannel->start();
14124 if (rc < 0) {
14125 LOGE("META channel start failed");
14126 return rc;
14127 }
14128 }
14129 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14130 it != mStreamInfo.end(); it++) {
14131 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14132 if (channel) {
14133 rc = channel->start();
14134 if (rc < 0) {
14135 LOGE("channel start failed");
14136 return rc;
14137 }
14138 }
14139 }
14140 if (mAnalysisChannel) {
14141 mAnalysisChannel->start();
14142 }
14143 if (mSupportChannel) {
14144 rc = mSupportChannel->start();
14145 if (rc < 0) {
14146 LOGE("Support channel start failed");
14147 return rc;
14148 }
14149 }
14150 if (mRawDumpChannel) {
14151 rc = mRawDumpChannel->start();
14152 if (rc < 0) {
14153 LOGE("RAW dump channel start failed");
14154 return rc;
14155 }
14156 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014157 if (mHdrPlusRawSrcChannel) {
14158 rc = mHdrPlusRawSrcChannel->start();
14159 if (rc < 0) {
14160 LOGE("HDR+ RAW channel start failed");
14161 return rc;
14162 }
14163 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014164
14165 LOGD("All channels started");
14166 return rc;
14167}
14168
14169/*===========================================================================
14170 * FUNCTION : notifyErrorForPendingRequests
14171 *
14172 * DESCRIPTION: This function sends an error for all pending requests/buffers
14173 *
14174 * PARAMETERS : None
14175 *
14176 * RETURN : Error codes
14177 * NO_ERROR on success
14178 *
14179 *==========================================================================*/
14180int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14181{
Emilian Peev7650c122017-01-19 08:24:33 -080014182 notifyErrorFoPendingDepthData(mDepthChannel);
14183
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014184 auto pendingRequest = mPendingRequestsList.begin();
14185 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014186
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014187 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14188 // buffers (for which buffers aren't sent yet).
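    // For each frame number three cases are handled: only buffers still pending (send
    // ERROR_BUFFER per buffer), only result metadata still pending (send ERROR_RESULT), or
    // both pending (send ERROR_REQUEST and return every buffer with an error status).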
14189 while (pendingRequest != mPendingRequestsList.end() ||
14190 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14191 if (pendingRequest == mPendingRequestsList.end() ||
14192 pendingBuffer->frame_number < pendingRequest->frame_number) {
14193 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14194 // with error.
14195 for (auto &info : pendingBuffer->mPendingBufferList) {
14196 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014197 camera3_notify_msg_t notify_msg;
14198 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14199 notify_msg.type = CAMERA3_MSG_ERROR;
14200 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014201 notify_msg.message.error.error_stream = info.stream;
14202 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014203 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014204
14205 camera3_stream_buffer_t buffer = {};
14206 buffer.acquire_fence = -1;
14207 buffer.release_fence = -1;
14208 buffer.buffer = info.buffer;
14209 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14210 buffer.stream = info.stream;
14211 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014212 }
14213
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014214 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14215 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14216 pendingBuffer->frame_number > pendingRequest->frame_number) {
14217 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014218 camera3_notify_msg_t notify_msg;
14219 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14220 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014221 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14222 notify_msg.message.error.error_stream = nullptr;
14223 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014224 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014225
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014226 if (pendingRequest->input_buffer != nullptr) {
14227 camera3_capture_result result = {};
14228 result.frame_number = pendingRequest->frame_number;
14229 result.result = nullptr;
14230 result.input_buffer = pendingRequest->input_buffer;
14231 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014232 }
14233
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014234 mShutterDispatcher.clear(pendingRequest->frame_number);
14235 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14236 } else {
14237 // If both buffers and result metadata weren't sent yet, notify about a request error
14238 // and return buffers with error.
14239 for (auto &info : pendingBuffer->mPendingBufferList) {
14240 camera3_notify_msg_t notify_msg;
14241 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14242 notify_msg.type = CAMERA3_MSG_ERROR;
14243 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14244 notify_msg.message.error.error_stream = info.stream;
14245 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14246 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014247
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014248 camera3_stream_buffer_t buffer = {};
14249 buffer.acquire_fence = -1;
14250 buffer.release_fence = -1;
14251 buffer.buffer = info.buffer;
14252 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14253 buffer.stream = info.stream;
14254 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14255 }
14256
14257 if (pendingRequest->input_buffer != nullptr) {
14258 camera3_capture_result result = {};
14259 result.frame_number = pendingRequest->frame_number;
14260 result.result = nullptr;
14261 result.input_buffer = pendingRequest->input_buffer;
14262 orchestrateResult(&result);
14263 }
14264
14265 mShutterDispatcher.clear(pendingRequest->frame_number);
14266 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14267 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014268 }
14269 }
14270
14271 /* Reset pending frame Drop list and requests list */
14272 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014273 mShutterDispatcher.clear();
14274 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014275 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014276 mExpectedFrameDuration = 0;
14277 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014278 LOGH("Cleared all the pending buffers ");
14279
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014280 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014281}
14282
14283bool QCamera3HardwareInterface::isOnEncoder(
14284 const cam_dimension_t max_viewfinder_size,
14285 uint32_t width, uint32_t height)
14286{
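    // A stream is routed to the encoder (snapshot) path when it is larger than the maximum
    // viewfinder size or exceeds 4K in either dimension.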
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014287 return ((width > (uint32_t)max_viewfinder_size.width) ||
14288 (height > (uint32_t)max_viewfinder_size.height) ||
14289 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14290 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014291}
14292
14293/*===========================================================================
14294 * FUNCTION : setBundleInfo
14295 *
14296 * DESCRIPTION: Set bundle info for all streams that are bundle.
14297 *
14298 * PARAMETERS : None
14299 *
14300 * RETURN : NO_ERROR on success
14301 * Error codes on failure
14302 *==========================================================================*/
14303int32_t QCamera3HardwareInterface::setBundleInfo()
14304{
14305 int32_t rc = NO_ERROR;
14306
14307 if (mChannelHandle) {
14308 cam_bundle_config_t bundleInfo;
14309 memset(&bundleInfo, 0, sizeof(bundleInfo));
14310 rc = mCameraHandle->ops->get_bundle_info(
14311 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14312 if (rc != NO_ERROR) {
14313 LOGE("get_bundle_info failed");
14314 return rc;
14315 }
14316 if (mAnalysisChannel) {
14317 mAnalysisChannel->setBundleInfo(bundleInfo);
14318 }
14319 if (mSupportChannel) {
14320 mSupportChannel->setBundleInfo(bundleInfo);
14321 }
14322 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14323 it != mStreamInfo.end(); it++) {
14324 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14325 channel->setBundleInfo(bundleInfo);
14326 }
14327 if (mRawDumpChannel) {
14328 mRawDumpChannel->setBundleInfo(bundleInfo);
14329 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014330 if (mHdrPlusRawSrcChannel) {
14331 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14332 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014333 }
14334
14335 return rc;
14336}
14337
14338/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014339 * FUNCTION : setInstantAEC
14340 *
14341 * DESCRIPTION: Set Instant AEC related params.
14342 *
14343 * PARAMETERS :
14344 * @meta: CameraMetadata reference
14345 *
14346 * RETURN : NO_ERROR on success
14347 * Error codes on failure
14348 *==========================================================================*/
14349int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14350{
14351 int32_t rc = NO_ERROR;
14352 uint8_t val = 0;
14353 char prop[PROPERTY_VALUE_MAX];
14354
14355 // First try to configure instant AEC from framework metadata
14356 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14357 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14358 }
14359
14360 // If framework did not set this value, try to read from set prop.
14361 if (val == 0) {
14362 memset(prop, 0, sizeof(prop));
14363 property_get("persist.camera.instant.aec", prop, "0");
14364 val = (uint8_t)atoi(prop);
14365 }
14366
14367 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14368 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14369 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14370 mInstantAEC = val;
14371 mInstantAECSettledFrameNumber = 0;
14372 mInstantAecFrameIdxCount = 0;
14373 LOGH("instantAEC value set %d",val);
14374 if (mInstantAEC) {
14375 memset(prop, 0, sizeof(prop));
14376 property_get("persist.camera.ae.instant.bound", prop, "10");
14377 int32_t aec_frame_skip_cnt = atoi(prop);
14378 if (aec_frame_skip_cnt >= 0) {
14379 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14380 } else {
14381 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14382 rc = BAD_VALUE;
14383 }
14384 }
14385 } else {
14386 LOGE("Bad instant aec value set %d", val);
14387 rc = BAD_VALUE;
14388 }
14389 return rc;
14390}
14391
14392/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014393 * FUNCTION : get_num_overall_buffers
14394 *
14395 * DESCRIPTION: Estimate number of pending buffers across all requests.
14396 *
14397 * PARAMETERS : None
14398 *
14399 * RETURN : Number of overall pending buffers
14400 *
14401 *==========================================================================*/
14402uint32_t PendingBuffersMap::get_num_overall_buffers()
14403{
14404 uint32_t sum_buffers = 0;
14405 for (auto &req : mPendingBuffersInRequest) {
14406 sum_buffers += req.mPendingBufferList.size();
14407 }
14408 return sum_buffers;
14409}
14410
14411/*===========================================================================
14412 * FUNCTION : removeBuf
14413 *
14414 * DESCRIPTION: Remove a matching buffer from tracker.
14415 *
14416 * PARAMETERS : @buffer: image buffer for the callback
14417 *
14418 * RETURN : None
14419 *
14420 *==========================================================================*/
14421void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14422{
14423 bool buffer_found = false;
14424 for (auto req = mPendingBuffersInRequest.begin();
14425 req != mPendingBuffersInRequest.end(); req++) {
14426 for (auto k = req->mPendingBufferList.begin();
14427 k != req->mPendingBufferList.end(); k++ ) {
14428 if (k->buffer == buffer) {
14429 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14430 req->frame_number, buffer);
14431 k = req->mPendingBufferList.erase(k);
14432 if (req->mPendingBufferList.empty()) {
14433 // Remove this request from Map
14434 req = mPendingBuffersInRequest.erase(req);
14435 }
14436 buffer_found = true;
14437 break;
14438 }
14439 }
14440 if (buffer_found) {
14441 break;
14442 }
14443 }
14444 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14445 get_num_overall_buffers());
14446}
14447
14448/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014449 * FUNCTION : getBufErrStatus
14450 *
14451 * DESCRIPTION: get buffer error status
14452 *
14453 * PARAMETERS : @buffer: buffer handle
14454 *
14455 * RETURN : Error status
14456 *
14457 *==========================================================================*/
14458int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14459{
14460 for (auto& req : mPendingBuffersInRequest) {
14461 for (auto& k : req.mPendingBufferList) {
14462 if (k.buffer == buffer)
14463 return k.bufStatus;
14464 }
14465 }
14466 return CAMERA3_BUFFER_STATUS_OK;
14467}
14468
14469/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014470 * FUNCTION : setPAAFSupport
14471 *
14472 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14473 * feature mask according to stream type and filter
14474 * arrangement
14475 *
14476 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14477 * @stream_type: stream type
14478 * @filter_arrangement: filter arrangement
14479 *
14480 * RETURN : None
14481 *==========================================================================*/
14482void QCamera3HardwareInterface::setPAAFSupport(
14483 cam_feature_mask_t& feature_mask,
14484 cam_stream_type_t stream_type,
14485 cam_color_filter_arrangement_t filter_arrangement)
14486{
Thierry Strudel3d639192016-09-09 11:52:26 -070014487 switch (filter_arrangement) {
14488 case CAM_FILTER_ARRANGEMENT_RGGB:
14489 case CAM_FILTER_ARRANGEMENT_GRBG:
14490 case CAM_FILTER_ARRANGEMENT_GBRG:
14491 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014492 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14493 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014494 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014495 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14496 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014497 }
14498 break;
14499 case CAM_FILTER_ARRANGEMENT_Y:
14500 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14501 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14502 }
14503 break;
14504 default:
14505 break;
14506 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014507 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14508 feature_mask, stream_type, filter_arrangement);
14509
14510
Thierry Strudel3d639192016-09-09 11:52:26 -070014511}
14512
14513/*===========================================================================
14514* FUNCTION : getSensorMountAngle
14515*
14516* DESCRIPTION: Retrieve sensor mount angle
14517*
14518* PARAMETERS : None
14519*
14520* RETURN : sensor mount angle in uint32_t
14521*==========================================================================*/
14522uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14523{
14524 return gCamCapability[mCameraId]->sensor_mount_angle;
14525}
14526
14527/*===========================================================================
14528* FUNCTION : getRelatedCalibrationData
14529*
14530* DESCRIPTION: Retrieve related system calibration data
14531*
14532* PARAMETERS : None
14533*
14534* RETURN : Pointer of related system calibration data
14535*==========================================================================*/
14536const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14537{
14538 return (const cam_related_system_calibration_data_t *)
14539 &(gCamCapability[mCameraId]->related_cam_calibration);
14540}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014541
14542/*===========================================================================
14543 * FUNCTION : is60HzZone
14544 *
14545 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14546 *
14547 * PARAMETERS : None
14548 *
14549 * RETURN : True if in 60Hz zone, False otherwise
14550 *==========================================================================*/
14551bool QCamera3HardwareInterface::is60HzZone()
14552{
14553 time_t t = time(NULL);
14554 struct tm lt;
14555
14556 struct tm* r = localtime_r(&t, &lt);
14557
14558 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
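    // Heuristic: UTC offsets between -2h and +8h (exclusive) are treated as
    // 50Hz regions; any other offset, or failure to obtain the local time,
    // is assumed to be a 60Hz zone.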
14559 return true;
14560 else
14561 return false;
14562}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014563
14564/*===========================================================================
14565 * FUNCTION : adjustBlackLevelForCFA
14566 *
14567 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14568 * of bayer CFA (Color Filter Array).
14569 *
14570 * PARAMETERS : @input: black level pattern in the order of RGGB
14571 * @output: black level pattern in the order of CFA
14572 * @color_arrangement: CFA color arrangement
14573 *
14574 * RETURN : None
14575 *==========================================================================*/
14576template<typename T>
14577void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14578 T input[BLACK_LEVEL_PATTERN_CNT],
14579 T output[BLACK_LEVEL_PATTERN_CNT],
14580 cam_color_filter_arrangement_t color_arrangement)
14581{
14582 switch (color_arrangement) {
14583 case CAM_FILTER_ARRANGEMENT_GRBG:
14584 output[0] = input[1];
14585 output[1] = input[0];
14586 output[2] = input[3];
14587 output[3] = input[2];
14588 break;
14589 case CAM_FILTER_ARRANGEMENT_GBRG:
14590 output[0] = input[2];
14591 output[1] = input[3];
14592 output[2] = input[0];
14593 output[3] = input[1];
14594 break;
14595 case CAM_FILTER_ARRANGEMENT_BGGR:
14596 output[0] = input[3];
14597 output[1] = input[2];
14598 output[2] = input[1];
14599 output[3] = input[0];
14600 break;
14601 case CAM_FILTER_ARRANGEMENT_RGGB:
14602 output[0] = input[0];
14603 output[1] = input[1];
14604 output[2] = input[2];
14605 output[3] = input[3];
14606 break;
14607 default:
14608 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14609 break;
14610 }
14611}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014612
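// Merge the JPEG-related settings (GPS, orientation, quality, thumbnail) and
// the capture intent of the original HDR+ still capture request into the
// result metadata, since the HDR+ result metadata comes from a ZSL buffer.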
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014613void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14614 CameraMetadata &resultMetadata,
14615 std::shared_ptr<metadata_buffer_t> settings)
14616{
14617 if (settings == nullptr) {
14618 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14619 return;
14620 }
14621
14622 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14623 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14624 }
14625
14626 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14627 String8 str((const char *)gps_methods);
14628 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14629 }
14630
14631 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14632 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14633 }
14634
14635 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14636 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14637 }
14638
14639 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14640 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14641 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14642 }
14643
14644 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14645 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14646 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14647 }
14648
14649 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14650 int32_t fwk_thumb_size[2];
14651 fwk_thumb_size[0] = thumb_size->width;
14652 fwk_thumb_size[1] = thumb_size->height;
14653 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14654 }
14655
14656 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14657 uint8_t fwk_intent = intent[0];
14658 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14659 }
14660}
14661
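// Try to submit a capture request as an HDR+ request. The request qualifies
// only if noise reduction and edge modes are HIGH_QUALITY and the single
// output buffer is a JPEG (BLOB) stream. On success, a YUV buffer is reserved
// from the pic channel and the request is submitted to the HDR+ service.
// Returns true if the HDR+ request was submitted, false otherwise. The caller
// is expected to hold the HDR+ client lock (hence the "Locked" suffix).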
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014662bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14663 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14664 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014665{
14666 if (hdrPlusRequest == nullptr) return false;
14667
14668 // Check noise reduction mode is high quality.
14669 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14670 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14671 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014672 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14673 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014674 return false;
14675 }
14676
14677 // Check edge mode is high quality.
14678 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14679 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14680 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14681 return false;
14682 }
14683
14684 if (request.num_output_buffers != 1 ||
14685 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14686 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014687 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14688 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14689 request.output_buffers[i].stream->width,
14690 request.output_buffers[i].stream->height,
14691 request.output_buffers[i].stream->format);
14692 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014693 return false;
14694 }
14695
14696 // Get a YUV buffer from pic channel.
14697 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14698 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14699 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14700 if (res != OK) {
14701 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14702 __FUNCTION__, strerror(-res), res);
14703 return false;
14704 }
14705
14706 pbcamera::StreamBuffer buffer;
14707 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014708 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014709 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014710 buffer.dataSize = yuvBuffer->frame_len;
14711
14712 pbcamera::CaptureRequest pbRequest;
14713 pbRequest.id = request.frame_number;
14714 pbRequest.outputBuffers.push_back(buffer);
14715
14716 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014717 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014718 if (res != OK) {
14719 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14720 strerror(-res), res);
14721 return false;
14722 }
14723
14724 hdrPlusRequest->yuvBuffer = yuvBuffer;
14725 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14726
14727 return true;
14728}
14729
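// Ask the Easel manager client to open an HDR+ client asynchronously, unless
// one is already open or being opened. This object receives the outcome via
// onOpened() or onOpenFailed().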
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014730status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14731{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014732 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14733 return OK;
14734 }
14735
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014736 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014737 if (res != OK) {
14738 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14739 strerror(-res), res);
14740 return res;
14741 }
14742 gHdrPlusClientOpening = true;
14743
14744 return OK;
14745}
14746
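// Enable HDR+ mode. If the HDR+ client is not open yet, kick off an
// asynchronous open and return; HDR+ mode will then be enabled in onOpened().
// Otherwise configure the HDR+ streams and enable ZSL HDR+ mode so Easel
// starts capturing ZSL raw buffers.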
Chien-Yu Chenee335912017-02-09 17:53:20 -080014747status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14748{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014749 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014750
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014751 if (mHdrPlusModeEnabled) {
14752 return OK;
14753 }
14754
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014755 // Check if gHdrPlusClient is opened or being opened.
14756 if (gHdrPlusClient == nullptr) {
14757 if (gHdrPlusClientOpening) {
14758 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14759 return OK;
14760 }
14761
14762 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014763 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014764 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14765 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014766 return res;
14767 }
14768
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014769 // When opening HDR+ client completes, HDR+ mode will be enabled.
14770 return OK;
14771
Chien-Yu Chenee335912017-02-09 17:53:20 -080014772 }
14773
14774 // Configure stream for HDR+.
14775 res = configureHdrPlusStreamsLocked();
14776 if (res != OK) {
14777 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014778 return res;
14779 }
14780
14781 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14782 res = gHdrPlusClient->setZslHdrPlusMode(true);
14783 if (res != OK) {
14784 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014785 return res;
14786 }
14787
14788 mHdrPlusModeEnabled = true;
14789 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14790
14791 return OK;
14792}
14793
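// Block until an in-progress asynchronous HDR+ client open has finished.
// The lock (expected to hold gHdrPlusClientLock) is used to wait on
// gHdrPlusClientOpenCond.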
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014794void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14795{
14796 if (gHdrPlusClientOpening) {
14797 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14798 }
14799}
14800
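// Disable ZSL HDR+ mode and close the HDR+ client so Easel can enter low
// power mode.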
Chien-Yu Chenee335912017-02-09 17:53:20 -080014801void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14802{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014803 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014804 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014805 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14806 if (res != OK) {
14807 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14808 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014809
14810 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014811 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014812 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014813 }
14814
14815 mHdrPlusModeEnabled = false;
14816 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14817}
14818
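// Whether the current session's stream configuration is compatible with
// HDR+ mode: currently only sessions with a JPEG (pic) channel qualify.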
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014819bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14820{
14821 // Check if mPictureChannel is valid.
14822 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14823 if (mPictureChannel == nullptr) {
14824 return false;
14825 }
14826
14827 return true;
14828}
14829
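// Configure HDR+ client streams: the input is either the HAL-provided RAW10
// stream (mHdrPlusRawSrcChannel) or sensor MIPI data described by
// mSensorModeInfo, and the output is the YUV stream backing mPictureChannel.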
Chien-Yu Chenee335912017-02-09 17:53:20 -080014830status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014831{
14832 pbcamera::InputConfiguration inputConfig;
14833 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14834 status_t res = OK;
14835
14836 // Configure HDR+ client streams.
14837 // Get input config.
14838 if (mHdrPlusRawSrcChannel) {
14839 // HDR+ input buffers will be provided by HAL.
14840 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14841 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14842 if (res != OK) {
14843 LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14844 __FUNCTION__, strerror(-res), res);
14845 return res;
14846 }
14847
14848 inputConfig.isSensorInput = false;
14849 } else {
14850 // Sensor MIPI will send data to Easel.
14851 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014852 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014853 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14854 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14855 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14856 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14857 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014858 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014859 if (mSensorModeInfo.num_raw_bits != 10) {
14860 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14861 mSensorModeInfo.num_raw_bits);
14862 return BAD_VALUE;
14863 }
14864
14865 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014866 }
14867
14868 // Get output configurations.
14869 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014870 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014871
14872 // Easel may need to output YUV output buffers if mPictureChannel was created.
14873 pbcamera::StreamConfiguration yuvOutputConfig;
14874 if (mPictureChannel != nullptr) {
14875 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14876 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14877 if (res != OK) {
14878 LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14879 __FUNCTION__, strerror(-res), res);
14880
14881 return res;
14882 }
14883
14884 outputStreamConfigs.push_back(yuvOutputConfig);
14885 }
14886
14887 // TODO: consider other channels for YUV output buffers.
14888
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014889 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014890 if (res != OK) {
14891 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14892 strerror(-res), res);
14893 return res;
14894 }
14895
14896 return OK;
14897}
14898
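// Easel manager callback for a fatal Easel error: move the HAL to the ERROR
// state and trigger device error handling.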
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014899void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14900{
14901 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14902 // Set HAL state to error.
14903 pthread_mutex_lock(&mMutex);
14904 mState = ERROR;
14905 pthread_mutex_unlock(&mMutex);
14906
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014907 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014908}
14909
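// Callback invoked when the asynchronous HDR+ client open completes. Stores
// the client, wakes up any waiters, pushes the static metadata to the client,
// and enables HDR+ mode.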
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014910void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14911{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014912 if (client == nullptr) {
14913 ALOGE("%s: Opened client is null.", __FUNCTION__);
14914 return;
14915 }
14916
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014917 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014918 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14919
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014920 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014921 if (!gHdrPlusClientOpening) {
14922 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14923 return;
14924 }
14925
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014926 gHdrPlusClient = std::move(client);
14927 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014928 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014929
14930 // Set static metadata.
14931 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14932 if (res != OK) {
14933 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14934 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014935 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014936 gHdrPlusClient = nullptr;
14937 return;
14938 }
14939
14940 // Enable HDR+ mode.
14941 res = enableHdrPlusModeLocked();
14942 if (res != OK) {
14943 LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
14944 }
14945}
14946
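// Callback invoked when the asynchronous HDR+ client open fails: clear the
// opening flag and wake up any waiters.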
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014947void QCamera3HardwareInterface::onOpenFailed(status_t err)
14948{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014949 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014950 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014951 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014952 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014953}
14954
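// HDR+ client callback for a fatal error: move the HAL to the ERROR state
// and trigger device error handling.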
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014955void QCamera3HardwareInterface::onFatalError()
14956{
14957 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14958
14959 // Set HAL state to error.
14960 pthread_mutex_lock(&mMutex);
14961 mState = ERROR;
14962 pthread_mutex_unlock(&mMutex);
14963
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014964 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014965}
14966
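// HDR+ client shutter callback: mark the shutter for this request ready so
// the shutter dispatcher can deliver it in frame-number order.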
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070014967void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
14968{
14969 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
14970 __LINE__, requestId, apSensorTimestampNs);
14971
14972 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
14973}
14974
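// HDR+ client capture result callback. Expects a single YUV output buffer;
// merges the original request's settings into the result metadata, optionally
// dumps the YUV output (persist.camera.hdrplus.dump_yuv), hands the YUV
// buffer back to the pic channel for JPEG encoding, delivers the result
// metadata to the framework, and removes the pending HDR+ request.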
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014975void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014976 const camera_metadata_t &resultMetadata)
14977{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014978 if (result != nullptr) {
14979 if (result->outputBuffers.size() != 1) {
14980 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14981 result->outputBuffers.size());
14982 return;
14983 }
14984
14985 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14986 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14987 result->outputBuffers[0].streamId);
14988 return;
14989 }
14990
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014991 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014992 HdrPlusPendingRequest pendingRequest;
14993 {
14994 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14995 auto req = mHdrPlusPendingRequests.find(result->requestId);
14996 pendingRequest = req->second;
14997 }
14998
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014999 // Update the result metadata with the settings of the HDR+ still capture request because
15000 // the result metadata belongs to a ZSL buffer.
15001 CameraMetadata metadata;
15002 metadata = &resultMetadata;
15003 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15004 camera_metadata_t* updatedResultMetadata = metadata.release();
15005
15006 QCamera3PicChannel *picChannel =
15007 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15008
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015009 // Check if dumping HDR+ YUV output is enabled.
15010 char prop[PROPERTY_VALUE_MAX];
15011 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15012 bool dumpYuvOutput = atoi(prop);
15013
15014 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015015 // Dump yuv buffer to a ppm file.
15016 pbcamera::StreamConfiguration outputConfig;
15017 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15018 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15019 if (rc == OK) {
15020 char buf[FILENAME_MAX] = {};
15021 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15022 result->requestId, result->outputBuffers[0].streamId,
15023 outputConfig.image.width, outputConfig.image.height);
15024
15025 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15026 } else {
15027 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15028 __FUNCTION__, strerror(-rc), rc);
15029 }
15030 }
15031
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015032 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15033 auto halMetadata = std::make_shared<metadata_buffer_t>();
15034 clear_metadata_buffer(halMetadata.get());
15035
15036 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15037 // encoding.
15038 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15039 halStreamId, /*minFrameDuration*/0);
15040 if (res == OK) {
15041 // Return the buffer to pic channel for encoding.
15042 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15043 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15044 halMetadata);
15045 } else {
15046 // Return the buffer without encoding.
15047 // TODO: This should not happen but we may want to report an error buffer to camera
15048 // service.
15049 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15050 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15051 strerror(-res), res);
15052 }
15053
15054 // Send HDR+ metadata to framework.
15055 {
15056 pthread_mutex_lock(&mMutex);
15057
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015058 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15059 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015060 pthread_mutex_unlock(&mMutex);
15061 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015062
15063 // Remove the HDR+ pending request.
15064 {
15065 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15066 auto req = mHdrPlusPendingRequests.find(result->requestId);
15067 mHdrPlusPendingRequests.erase(req);
15068 }
15069 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015070}
15071
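// HDR+ client callback for a failed capture. Returns the reserved YUV buffer
// to the pic channel, reports buffer errors for the request's pending buffers
// to the framework, and removes the pending request.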
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015072void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15073{
15074 if (failedResult == nullptr) {
15075 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15076 return;
15077 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015078
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015079 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015080
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015081 // Remove the pending HDR+ request.
15082 {
15083 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15084 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15085
15086 // Return the buffer to pic channel.
15087 QCamera3PicChannel *picChannel =
15088 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15089 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15090
15091 mHdrPlusPendingRequests.erase(pendingRequest);
15092 }
15093
15094 pthread_mutex_lock(&mMutex);
15095
15096 // Find the pending buffers.
15097 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15098 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15099 if (pendingBuffers->frame_number == failedResult->requestId) {
15100 break;
15101 }
15102 pendingBuffers++;
15103 }
15104
15105 // Send out buffer errors for the pending buffers.
15106 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15107 std::vector<camera3_stream_buffer_t> streamBuffers;
15108 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15109 // Prepare a stream buffer.
15110 camera3_stream_buffer_t streamBuffer = {};
15111 streamBuffer.stream = buffer.stream;
15112 streamBuffer.buffer = buffer.buffer;
15113 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15114 streamBuffer.acquire_fence = -1;
15115 streamBuffer.release_fence = -1;
15116
15117 streamBuffers.push_back(streamBuffer);
15118
15119 // Send out error buffer event.
15120 camera3_notify_msg_t notify_msg = {};
15121 notify_msg.type = CAMERA3_MSG_ERROR;
15122 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15123 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15124 notify_msg.message.error.error_stream = buffer.stream;
15125
15126 orchestrateNotify(&notify_msg);
15127 }
15128
15129 camera3_capture_result_t result = {};
15130 result.frame_number = pendingBuffers->frame_number;
15131 result.num_output_buffers = streamBuffers.size();
15132 result.output_buffers = &streamBuffers[0];
15133
15134 // Send out result with buffer errors.
15135 orchestrateResult(&result);
15136
15137 // Remove pending buffers.
15138 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15139 }
15140
15141 // Remove pending request.
15142 auto halRequest = mPendingRequestsList.begin();
15143 while (halRequest != mPendingRequestsList.end()) {
15144 if (halRequest->frame_number == failedResult->requestId) {
15145 mPendingRequestsList.erase(halRequest);
15146 break;
15147 }
15148 halRequest++;
15149 }
15150
15151 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015152}
15153
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015154
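// ShutterDispatcher delivers shutter notifications to the framework in
// frame-number order, holding back shutters that become ready out of order.
// Regular and reprocess requests are tracked separately.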
15155ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15156 mParent(parent) {}
15157
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015158void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015159{
15160 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015161
15162 if (isReprocess) {
15163 mReprocessShutters.emplace(frameNumber, Shutter());
15164 } else {
15165 mShutters.emplace(frameNumber, Shutter());
15166 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015167}
15168
15169void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15170{
15171 std::lock_guard<std::mutex> lock(mLock);
15172
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015173 std::map<uint32_t, Shutter> *shutters = nullptr;
15174
15175 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015176 auto shutter = mShutters.find(frameNumber);
15177 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015178 shutter = mReprocessShutters.find(frameNumber);
15179 if (shutter == mReprocessShutters.end()) {
15180 // Shutter was already sent.
15181 return;
15182 }
15183 shutters = &mReprocessShutters;
15184 } else {
15185 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015186 }
15187
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015188 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015189 shutter->second.ready = true;
15190 shutter->second.timestamp = timestamp;
15191
15192 // Iterate through the shutters and send them out until reaching one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015193 shutter = shutters->begin();
15194 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015195 if (!shutter->second.ready) {
15196 // If this shutter is not ready, the following shutters can't be sent.
15197 break;
15198 }
15199
15200 camera3_notify_msg_t msg = {};
15201 msg.type = CAMERA3_MSG_SHUTTER;
15202 msg.message.shutter.frame_number = shutter->first;
15203 msg.message.shutter.timestamp = shutter->second.timestamp;
15204 mParent->orchestrateNotify(&msg);
15205
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015206 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015207 }
15208}
15209
15210void ShutterDispatcher::clear(uint32_t frameNumber)
15211{
15212 std::lock_guard<std::mutex> lock(mLock);
15213 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015214 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015215}
15216
15217void ShutterDispatcher::clear()
15218{
15219 std::lock_guard<std::mutex> lock(mLock);
15220
15221 // Log errors for stale shutters.
15222 for (auto &shutter : mShutters) {
15223 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15224 __FUNCTION__, shutter.first, shutter.second.ready,
15225 shutter.second.timestamp);
15226 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015227
15228 // Log errors for stale reprocess shutters.
15229 for (auto &shutter : mReprocessShutters) {
15230 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15231 __FUNCTION__, shutter.first, shutter.second.ready,
15232 shutter.second.timestamp);
15233 }
15234
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015235 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015236 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015237}
15238
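// OutputBufferDispatcher delivers output buffers to the framework in
// frame-number order, per stream, holding back buffers that become ready
// out of order.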
15239OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15240 mParent(parent) {}
15241
15242status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15243{
15244 std::lock_guard<std::mutex> lock(mLock);
15245 mStreamBuffers.clear();
15246 if (!streamList) {
15247 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15248 return -EINVAL;
15249 }
15250
15251 // Create a "frame-number -> buffer" map for each stream.
15252 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15253 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15254 }
15255
15256 return OK;
15257}
15258
15259status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15260{
15261 std::lock_guard<std::mutex> lock(mLock);
15262
15263 // Find the "frame-number -> buffer" map for the stream.
15264 auto buffers = mStreamBuffers.find(stream);
15265 if (buffers == mStreamBuffers.end()) {
15266 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15267 return -EINVAL;
15268 }
15269
15270 // Create an unready buffer for this frame number.
15271 buffers->second.emplace(frameNumber, Buffer());
15272 return OK;
15273}
15274
15275void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15276 const camera3_stream_buffer_t &buffer)
15277{
15278 std::lock_guard<std::mutex> lock(mLock);
15279
15280 // Find the frame number -> buffer map for the stream.
15281 auto buffers = mStreamBuffers.find(buffer.stream);
15282 if (buffers == mStreamBuffers.end()) {
15283 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15284 return;
15285 }
15286
15287 // Find the unready buffer for this frame number and mark it ready.
15288 auto pendingBuffer = buffers->second.find(frameNumber);
15289 if (pendingBuffer == buffers->second.end()) {
15290 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15291 return;
15292 }
15293
15294 pendingBuffer->second.ready = true;
15295 pendingBuffer->second.buffer = buffer;
15296
15297 // Iterate through the buffers and send them out until reaching one that's not ready yet.
15298 pendingBuffer = buffers->second.begin();
15299 while (pendingBuffer != buffers->second.end()) {
15300 if (!pendingBuffer->second.ready) {
15301 // If this buffer is not ready, the following buffers can't be sent.
15302 break;
15303 }
15304
15305 camera3_capture_result_t result = {};
15306 result.frame_number = pendingBuffer->first;
15307 result.num_output_buffers = 1;
15308 result.output_buffers = &pendingBuffer->second.buffer;
15309
15310 // Send out the result with the ready buffer.
15311 mParent->orchestrateResult(&result);
15312
15313 pendingBuffer = buffers->second.erase(pendingBuffer);
15314 }
15315}
15316
15317void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15318{
15319 std::lock_guard<std::mutex> lock(mLock);
15320
15321 // Log errors for stale buffers.
15322 for (auto &buffers : mStreamBuffers) {
15323 for (auto &buffer : buffers.second) {
15324 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15325 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15326 }
15327 buffers.second.clear();
15328 }
15329
15330 if (clearConfiguredStreams) {
15331 mStreamBuffers.clear();
15332 }
15333}
15334
Thierry Strudel3d639192016-09-09 11:52:26 -070015335}; //end namespace qcamera