/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
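// Illustration (assumed usage, not part of the original comments):
// METADATA_MAP_SIZE(EFFECT_MODES_MAP) yields the number of entries in one of
// the statically sized QCameraMap tables defined below, so translation lookups
// can be bounded without maintaining separate length constants.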

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
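// These are offsets into the flat per-face landmark array reported to the
// framework (x/y pairs for left eye, right eye and mouth), e.g.
// landmarks[LEFT_EYE_X]/landmarks[LEFT_EYE_Y] hold the left-eye coordinates.
// The array name here is illustrative; see the face-detection result
// translation later in this file for the actual usage.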

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.
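// Accesses to the Easel globals above are expected to take this lock first;
// for example, openCamera() and closeCamera() below wrap their Easel handling
// in Mutex::Autolock l(gHdrPlusClientLock);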


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the code
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values the traversal selects the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
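// Example of the ordering rule described above: CAM_AWB_D50 appears for D50,
// DAYLIGHT and FINE_WEATHER, so a HAL-to-Android lookup that walks this table
// front to back reports ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 for all three
// of those HAL values.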

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};
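// Note (not from the original comments): register_stream_buffers and
// get_metadata_vendor_tag_ops are intentionally left NULL; at the HAL3 device
// API level reported by this HAL those entry points are legacy and unused
// (vendor tags are exposed through the module interface instead).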

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
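// Typical usage (see openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
// When gEaselProfilingEnabled is set this logs the event with a CLOCK_BOOTTIME
// timestamp in milliseconds; otherwise it is a no-op.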

/*===========================================================================
 * FUNCTION : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
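    // If libadreno_utils.so (or its get_gpu_pixel_alignment symbol) is not
    // available, mSurfaceStridePadding keeps the CAM_PAD_TO_32 default set above.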

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt : ptr to event
 *   @user_data : user data ptr
 *
 * RETURN : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN : int32_t type of status
 *          NO_ERROR -- success
 *          non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION : validateStreamDimensions
 *
 * DESCRIPTION: Check whether the requested stream sizes are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
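                // Worked example of the check below (assumed interpretation):
                // a depthWidth x depthHeight PD map is presumably 2 bytes per
                // sample, so the depth-cloud blob has to be advertised as a
                // ((depthWidth * depthHeight * 2) / 16) x 1 stream.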
Emilian Peev0f3c3162017-03-15 12:57:46 +00001223 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001224 if ((depthSamplesCount == newStream->width) &&
1225 (1 == newStream->height)) {
1226 sizeFound = true;
1227 }
1228 break;
1229 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001230 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1231 /* Verify set size against generated sizes table */
1232 for (size_t i = 0; i < count; i++) {
1233 if (((int32_t)rotatedWidth ==
1234 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1235 ((int32_t)rotatedHeight ==
1236 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1237 sizeFound = true;
1238 break;
1239 }
1240 }
1241 break;
1242 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1243 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1244 default:
1245 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1246 || newStream->stream_type == CAMERA3_STREAM_INPUT
1247 || IS_USAGE_ZSL(newStream->usage)) {
1248 if (((int32_t)rotatedWidth ==
1249 gCamCapability[mCameraId]->active_array_size.width) &&
1250 ((int32_t)rotatedHeight ==
1251 gCamCapability[mCameraId]->active_array_size.height)) {
1252 sizeFound = true;
1253 break;
1254 }
1255 /* We could potentially break here to enforce ZSL stream
1256 * set from frameworks always is full active array size
1257 * but it is not clear from the spc if framework will always
1258 * follow that, also we have logic to override to full array
1259 * size, so keeping the logic lenient at the moment
1260 */
1261 }
1262 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1263 MAX_SIZES_CNT);
1264 for (size_t i = 0; i < count; i++) {
1265 if (((int32_t)rotatedWidth ==
1266 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1267 ((int32_t)rotatedHeight ==
1268 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1269 sizeFound = true;
1270 break;
1271 }
1272 }
1273 break;
1274 } /* End of switch(newStream->format) */
1275
1276 /* We error out even if a single stream has unsupported size set */
1277 if (!sizeFound) {
1278 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1279 rotatedWidth, rotatedHeight, newStream->format,
1280 gCamCapability[mCameraId]->active_array_size.width,
1281 gCamCapability[mCameraId]->active_array_size.height);
1282 rc = -EINVAL;
1283 break;
1284 }
1285 } /* End of for each stream */
1286 return rc;
1287}
1288
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001289/*===========================================================================
1290 * FUNCTION : validateUsageFlags
1291 *
1292 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1293 *
1294 * PARAMETERS :
1295 * @stream_list : streams to be configured
1296 *
1297 * RETURN :
1298 * NO_ERROR if the usage flags are supported
1299 * error code if usage flags are not supported
1300 *
1301 *==========================================================================*/
1302int QCamera3HardwareInterface::validateUsageFlags(
1303 const camera3_stream_configuration_t* streamList)
1304{
1305 for (size_t j = 0; j < streamList->num_streams; j++) {
1306 const camera3_stream_t *newStream = streamList->streams[j];
1307
1308 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1309 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1310 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1311 continue;
1312 }
1313
1314 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1315 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1316 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1317 bool forcePreviewUBWC = true;
1318 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1319 forcePreviewUBWC = false;
1320 }
1321 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1322 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1323 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1324 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1325 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1326 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1327
1328 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1329 // So color spaces will always match.
1330
1331 // Check whether underlying formats of shared streams match.
1332 if (isVideo && isPreview && videoFormat != previewFormat) {
1333 LOGE("Combined video and preview usage flag is not supported");
1334 return -EINVAL;
1335 }
1336 if (isPreview && isZSL && previewFormat != zslFormat) {
1337 LOGE("Combined preview and zsl usage flag is not supported");
1338 return -EINVAL;
1339 }
1340 if (isVideo && isZSL && videoFormat != zslFormat) {
1341 LOGE("Combined video and zsl usage flag is not supported");
1342 return -EINVAL;
1343 }
1344 }
1345 return NO_ERROR;
1346}
1347
1348/*===========================================================================
1349 * FUNCTION : validateUsageFlagsForEis
1350 *
1351 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1352 *
1353 * PARAMETERS :
1354 * @stream_list : streams to be configured
1355 *
1356 * RETURN :
1357 * NO_ERROR if the usage flags are supported
1358 * error code if usage flags are not supported
1359 *
1360 *==========================================================================*/
1361int QCamera3HardwareInterface::validateUsageFlagsForEis(
1362 const camera3_stream_configuration_t* streamList)
1363{
1364 for (size_t j = 0; j < streamList->num_streams; j++) {
1365 const camera3_stream_t *newStream = streamList->streams[j];
1366
1367 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1368 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1369
1370 // Because EIS is "hard-coded" for certain use cases, and the current
1371 // implementation doesn't support shared preview and video on the same
1372 // stream, return failure if EIS is forced on.
1373 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1374 LOGE("Combined video and preview usage flag is not supported due to EIS");
1375 return -EINVAL;
1376 }
1377 }
1378 return NO_ERROR;
1379}
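
/*
 * Illustrative sketch only (hypothetical helper, not used by the HAL): the EIS
 * conflict above reduces to a simple predicate over per-stream usage plus the
 * two EIS flags, shown here on plain bools so it can be reasoned about in
 * isolation.
 */
static inline bool exampleEisConflictsWithSharedSurface(bool isPreview,
        bool isVideo, bool eisEnabled, bool eisSupportedSize)
{
    // A single surface serving both preview and video cannot go through the
    // EIS-enabled video path, so that combination is rejected up front.
    return isPreview && isVideo && eisEnabled && eisSupportedSize;
}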
1380
Thierry Strudel3d639192016-09-09 11:52:26 -07001381/*==============================================================================
1382 * FUNCTION : isSupportChannelNeeded
1383 *
1384 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
1385 *
1386 * PARAMETERS :
1387 * @stream_list : streams to be configured
1388 * @stream_config_info : the config info for streams to be configured
1389 *
1390 * RETURN : Boolean true/false decision
1391 *
1392 *==========================================================================*/
1393bool QCamera3HardwareInterface::isSupportChannelNeeded(
1394 camera3_stream_configuration_t *streamList,
1395 cam_stream_size_info_t stream_config_info)
1396{
1397 uint32_t i;
1398 bool pprocRequested = false;
1399 /* Check for conditions where PProc pipeline does not have any streams*/
1400 for (i = 0; i < stream_config_info.num_streams; i++) {
1401 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1402 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1403 pprocRequested = true;
1404 break;
1405 }
1406 }
1407
1408 if (pprocRequested == false )
1409 return true;
1410
1411 /* Dummy stream needed if only raw or jpeg streams present */
1412 for (i = 0; i < streamList->num_streams; i++) {
1413 switch(streamList->streams[i]->format) {
1414 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1415 case HAL_PIXEL_FORMAT_RAW10:
1416 case HAL_PIXEL_FORMAT_RAW16:
1417 case HAL_PIXEL_FORMAT_BLOB:
1418 break;
1419 default:
1420 return false;
1421 }
1422 }
1423 return true;
1424}
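
/*
 * Illustrative sketch only: a RAW16 + BLOB (JPEG) configuration, i.e. the
 * "only raw or jpeg streams present" case for which isSupportChannelNeeded()
 * returns true and a dummy support stream is added. Sizes are arbitrary
 * example values, not queried capabilities.
 */
static inline void exampleRawPlusJpegOnlyConfig(camera3_stream_t &rawStream,
        camera3_stream_t &jpegStream, camera3_stream_t *streams[2],
        camera3_stream_configuration_t &config)
{
    rawStream = {};
    rawStream.stream_type = CAMERA3_STREAM_OUTPUT;
    rawStream.format = HAL_PIXEL_FORMAT_RAW16;
    rawStream.width = 4032;
    rawStream.height = 3024;

    jpegStream = {};
    jpegStream.stream_type = CAMERA3_STREAM_OUTPUT;
    jpegStream.format = HAL_PIXEL_FORMAT_BLOB;
    jpegStream.width = 4032;
    jpegStream.height = 3024;

    streams[0] = &rawStream;
    streams[1] = &jpegStream;
    config = {};
    config.num_streams = 2;
    config.streams = streams;
    config.operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
}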
1425
1426/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001427 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001428 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001429 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001430 *
1431 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001432 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001433 *
1434 * RETURN : int32_t type of status
1435 * NO_ERROR -- success
1436 * non-zero failure code
1437 *
1438 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001439int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001440{
1441 int32_t rc = NO_ERROR;
1442
1443 cam_dimension_t max_dim = {0, 0};
1444 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1445 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1446 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1447 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1448 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1449 }
1450
1451 clear_metadata_buffer(mParameters);
1452
1453 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1454 max_dim);
1455 if (rc != NO_ERROR) {
1456 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1457 return rc;
1458 }
1459
1460 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1461 if (rc != NO_ERROR) {
1462 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1463 return rc;
1464 }
1465
1466 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001467 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001468
1469 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1470 mParameters);
1471 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 return rc;
1474 }
1475
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001477 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1478 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1479 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1480 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1481 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001482
1483 return rc;
1484}
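
/*
 * Illustrative sketch only (hypothetical standalone helper): the max-dimension
 * fold that getSensorModeInfo() applies to mStreamConfigInfo before asking the
 * backend, via CAM_INTF_PARM_MAX_DIMENSION, to select a sensor mode.
 */
static inline cam_dimension_t exampleMaxStreamDimension(
        const cam_stream_size_info_t &info)
{
    cam_dimension_t maxDim = {0, 0};
    for (uint32_t i = 0; i < info.num_streams; i++) {
        // Width and height are maximized independently, so the result is an
        // envelope of all configured streams, not necessarily one of them.
        if (info.stream_sizes[i].width > maxDim.width)
            maxDim.width = info.stream_sizes[i].width;
        if (info.stream_sizes[i].height > maxDim.height)
            maxDim.height = info.stream_sizes[i].height;
    }
    return maxDim;
}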
1485
1486/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 * FUNCTION : addToPPFeatureMask
1488 *
1489 * DESCRIPTION: add additional features to pp feature mask based on
1490 * stream type and usecase
1491 *
1492 * PARAMETERS :
1493 * @stream_format : stream type for feature mask
1494 * @stream_idx : stream idx within postprocess_mask list to change
1495 *
1496 * RETURN : None
1497 *
1498 *==========================================================================*/
1499void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1500 uint32_t stream_idx)
1501{
1502 char feature_mask_value[PROPERTY_VALUE_MAX];
1503 cam_feature_mask_t feature_mask;
1504 int args_converted;
1505 int property_len;
1506
1507 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001508#ifdef _LE_CAMERA_
1509 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1510 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1511 property_len = property_get("persist.camera.hal3.feature",
1512 feature_mask_value, swtnr_feature_mask_value);
1513#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 property_len = property_get("persist.camera.hal3.feature",
1515 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001516#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1518 (feature_mask_value[1] == 'x')) {
1519 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1520 } else {
1521 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1522 }
1523 if (1 != args_converted) {
1524 feature_mask = 0;
1525 LOGE("Wrong feature mask %s", feature_mask_value);
1526 return;
1527 }
1528
1529 switch (stream_format) {
1530 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1531 /* Add LLVD to pp feature mask only if video hint is enabled */
1532 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1533 mStreamConfigInfo.postprocess_mask[stream_idx]
1534 |= CAM_QTI_FEATURE_SW_TNR;
1535 LOGH("Added SW TNR to pp feature mask");
1536 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1537 mStreamConfigInfo.postprocess_mask[stream_idx]
1538 |= CAM_QCOM_FEATURE_LLVD;
1539 LOGH("Added LLVD SeeMore to pp feature mask");
1540 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001541 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1542 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1543 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1544 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001545 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1546 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1547 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1548 CAM_QTI_FEATURE_BINNING_CORRECTION;
1549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001550 break;
1551 }
1552 default:
1553 break;
1554 }
1555 LOGD("PP feature mask %llx",
1556 mStreamConfigInfo.postprocess_mask[stream_idx]);
1557}
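
/*
 * Illustrative sketch only (hypothetical helper): the hex-vs-decimal parse that
 * addToPPFeatureMask() applies to persist.camera.hal3.feature. valueLen plays
 * the role of property_len above; returns true when exactly one value was
 * converted, matching the sscanf check above.
 */
static inline bool exampleParseFeatureMask(const char *value, int valueLen,
        cam_feature_mask_t &mask)
{
    int converted;
    if ((valueLen > 2) && (value[0] == '0') && (value[1] == 'x')) {
        converted = sscanf(value, "0x%llx", &mask);  // hex form, e.g. "0x4000"
    } else {
        converted = sscanf(value, "%lld", &mask);    // decimal form, e.g. "0"
    }
    return (converted == 1);
}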
1558
1559/*==============================================================================
1560 * FUNCTION : updateFpsInPreviewBuffer
1561 *
1562 * DESCRIPTION: update FPS information in preview buffer.
1563 *
1564 * PARAMETERS :
1565 * @metadata : pointer to metadata buffer
1566 * @frame_number: frame_number to look for in pending buffer list
1567 *
1568 * RETURN : None
1569 *
1570 *==========================================================================*/
1571void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1572 uint32_t frame_number)
1573{
1574 // Mark all pending buffers for this particular request
1575 // with corresponding framerate information
1576 for (List<PendingBuffersInRequest>::iterator req =
1577 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1578 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1579 for(List<PendingBufferInfo>::iterator j =
1580 req->mPendingBufferList.begin();
1581 j != req->mPendingBufferList.end(); j++) {
1582 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1583 if ((req->frame_number == frame_number) &&
1584 (channel->getStreamTypeMask() &
1585 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1586 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1587 CAM_INTF_PARM_FPS_RANGE, metadata) {
1588 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1589 struct private_handle_t *priv_handle =
1590 (struct private_handle_t *)(*(j->buffer));
1591 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1592 }
1593 }
1594 }
1595 }
1596}
1597
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001598/*==============================================================================
1599 * FUNCTION : updateTimeStampInPendingBuffers
1600 *
1601 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1602 * of a frame number
1603 *
1604 * PARAMETERS :
1605 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1606 * @timestamp : timestamp to be set
1607 *
1608 * RETURN : None
1609 *
1610 *==========================================================================*/
1611void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1612 uint32_t frameNumber, nsecs_t timestamp)
1613{
1614 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1615 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1616 if (req->frame_number != frameNumber)
1617 continue;
1618
1619 for (auto k = req->mPendingBufferList.begin();
1620 k != req->mPendingBufferList.end(); k++ ) {
1621 struct private_handle_t *priv_handle =
1622 (struct private_handle_t *) (*(k->buffer));
1623 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1624 }
1625 }
1626 return;
1627}
1628
Thierry Strudel3d639192016-09-09 11:52:26 -07001629/*===========================================================================
1630 * FUNCTION : configureStreams
1631 *
1632 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1633 * and output streams.
1634 *
1635 * PARAMETERS :
1636 * @stream_list : streams to be configured
1637 *
1638 * RETURN : int type of status
1639 *
1640 *==========================================================================*/
1641int QCamera3HardwareInterface::configureStreams(
1642 camera3_stream_configuration_t *streamList)
1643{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001644 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001645 int rc = 0;
1646
1647 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001648 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001650 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001651
1652 return rc;
1653}
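
/*
 * Illustrative sketch only, not part of this HAL: an RAII wrapper expressing
 * the acquire/release pairing that configureStreams() performs by hand around
 * configureStreamsPerfLocked(). Templated so it stays a sketch and does not
 * assume the exact perf-lock manager or enum type names used by this codebase.
 */
template <typename PerfMgrT, typename LockT>
class ExampleScopedPerfLock {
public:
    ExampleScopedPerfLock(PerfMgrT &mgr, LockT lock) : mMgr(mgr), mLock(lock) {
        mMgr.acquirePerfLock(mLock);
    }
    ~ExampleScopedPerfLock() {
        // Released on every exit path, including early returns.
        mMgr.releasePerfLock(mLock);
    }
    ExampleScopedPerfLock(const ExampleScopedPerfLock &) = delete;
    ExampleScopedPerfLock &operator=(const ExampleScopedPerfLock &) = delete;
private:
    PerfMgrT &mMgr;
    LockT mLock;
};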
1654
1655/*===========================================================================
1656 * FUNCTION : configureStreamsPerfLocked
1657 *
1658 * DESCRIPTION: configureStreams while perfLock is held.
1659 *
1660 * PARAMETERS :
1661 * @stream_list : streams to be configured
1662 *
1663 * RETURN : int32_t type of status
1664 * NO_ERROR -- success
1665 * non-zero failure code
1666 *==========================================================================*/
1667int QCamera3HardwareInterface::configureStreamsPerfLocked(
1668 camera3_stream_configuration_t *streamList)
1669{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001670 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001671 int rc = 0;
1672
1673 // Sanity check stream_list
1674 if (streamList == NULL) {
1675 LOGE("NULL stream configuration");
1676 return BAD_VALUE;
1677 }
1678 if (streamList->streams == NULL) {
1679 LOGE("NULL stream list");
1680 return BAD_VALUE;
1681 }
1682
1683 if (streamList->num_streams < 1) {
1684 LOGE("Bad number of streams requested: %d",
1685 streamList->num_streams);
1686 return BAD_VALUE;
1687 }
1688
1689 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1690 LOGE("Maximum number of streams %d exceeded: %d",
1691 MAX_NUM_STREAMS, streamList->num_streams);
1692 return BAD_VALUE;
1693 }
1694
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001695 rc = validateUsageFlags(streamList);
1696 if (rc != NO_ERROR) {
1697 return rc;
1698 }
1699
Thierry Strudel3d639192016-09-09 11:52:26 -07001700 mOpMode = streamList->operation_mode;
1701 LOGD("mOpMode: %d", mOpMode);
1702
1703 /* first invalidate all the streams in the mStreamInfo list;
1704 * if they appear again, they will be validated */
1705 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1706 it != mStreamInfo.end(); it++) {
1707 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1708 if (channel) {
1709 channel->stop();
1710 }
1711 (*it)->status = INVALID;
1712 }
1713
1714 if (mRawDumpChannel) {
1715 mRawDumpChannel->stop();
1716 delete mRawDumpChannel;
1717 mRawDumpChannel = NULL;
1718 }
1719
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001720 if (mHdrPlusRawSrcChannel) {
1721 mHdrPlusRawSrcChannel->stop();
1722 delete mHdrPlusRawSrcChannel;
1723 mHdrPlusRawSrcChannel = NULL;
1724 }
1725
Thierry Strudel3d639192016-09-09 11:52:26 -07001726 if (mSupportChannel)
1727 mSupportChannel->stop();
1728
1729 if (mAnalysisChannel) {
1730 mAnalysisChannel->stop();
1731 }
1732 if (mMetadataChannel) {
1733 /* If mStreamInfo is not empty, there is a metadata stream */
1734 mMetadataChannel->stop();
1735 }
1736 if (mChannelHandle) {
1737 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1738 mChannelHandle);
1739 LOGD("stopping channel %d", mChannelHandle);
1740 }
1741
1742 pthread_mutex_lock(&mMutex);
1743
1744 // Check state
1745 switch (mState) {
1746 case INITIALIZED:
1747 case CONFIGURED:
1748 case STARTED:
1749 /* valid state */
1750 break;
1751 default:
1752 LOGE("Invalid state %d", mState);
1753 pthread_mutex_unlock(&mMutex);
1754 return -ENODEV;
1755 }
1756
1757 /* Check whether we have video stream */
1758 m_bIs4KVideo = false;
1759 m_bIsVideo = false;
1760 m_bEisSupportedSize = false;
1761 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001762 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001763 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001764 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001765 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001766 uint32_t videoWidth = 0U;
1767 uint32_t videoHeight = 0U;
1768 size_t rawStreamCnt = 0;
1769 size_t stallStreamCnt = 0;
1770 size_t processedStreamCnt = 0;
1771 // Number of streams on ISP encoder path
1772 size_t numStreamsOnEncoder = 0;
1773 size_t numYuv888OnEncoder = 0;
1774 bool bYuv888OverrideJpeg = false;
1775 cam_dimension_t largeYuv888Size = {0, 0};
1776 cam_dimension_t maxViewfinderSize = {0, 0};
1777 bool bJpegExceeds4K = false;
1778 bool bJpegOnEncoder = false;
1779 bool bUseCommonFeatureMask = false;
1780 cam_feature_mask_t commonFeatureMask = 0;
1781 bool bSmallJpegSize = false;
1782 uint32_t width_ratio;
1783 uint32_t height_ratio;
1784 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1785 camera3_stream_t *inputStream = NULL;
1786 bool isJpeg = false;
1787 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001788 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001789 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001790
1791 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1792
1793 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001794 uint8_t eis_prop_set;
1795 uint32_t maxEisWidth = 0;
1796 uint32_t maxEisHeight = 0;
1797
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001798 // Initialize all instant AEC related variables
1799 mInstantAEC = false;
1800 mResetInstantAEC = false;
1801 mInstantAECSettledFrameNumber = 0;
1802 mAecSkipDisplayFrameBound = 0;
1803 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001804 mCurrFeatureState = 0;
1805 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001806
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1808
1809 size_t count = IS_TYPE_MAX;
1810 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1811 for (size_t i = 0; i < count; i++) {
1812 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001813 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1814 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001815 break;
1816 }
1817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001818
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001819 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001820 maxEisWidth = MAX_EIS_WIDTH;
1821 maxEisHeight = MAX_EIS_HEIGHT;
1822 }
1823
1824 /* EIS setprop control */
1825 char eis_prop[PROPERTY_VALUE_MAX];
1826 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001827 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 eis_prop_set = (uint8_t)atoi(eis_prop);
1829
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001830 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001831 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1832
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001833 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1834 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001835
Thierry Strudel3d639192016-09-09 11:52:26 -07001836 /* stream configurations */
1837 for (size_t i = 0; i < streamList->num_streams; i++) {
1838 camera3_stream_t *newStream = streamList->streams[i];
1839 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1840 "height = %d, rotation = %d, usage = 0x%x",
1841 i, newStream->stream_type, newStream->format,
1842 newStream->width, newStream->height, newStream->rotation,
1843 newStream->usage);
1844 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1845 newStream->stream_type == CAMERA3_STREAM_INPUT){
1846 isZsl = true;
1847 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001848 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1849 IS_USAGE_PREVIEW(newStream->usage)) {
1850 isPreview = true;
1851 }
1852
Thierry Strudel3d639192016-09-09 11:52:26 -07001853 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1854 inputStream = newStream;
1855 }
1856
Emilian Peev7650c122017-01-19 08:24:33 -08001857 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1858 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001859 isJpeg = true;
1860 jpegSize.width = newStream->width;
1861 jpegSize.height = newStream->height;
1862 if (newStream->width > VIDEO_4K_WIDTH ||
1863 newStream->height > VIDEO_4K_HEIGHT)
1864 bJpegExceeds4K = true;
1865 }
1866
1867 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1868 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1869 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001870 // In HAL3 we can have multiple different video streams.
1871 // The variables video width and height are used below as
1872 // dimensions of the biggest of them
1873 if (videoWidth < newStream->width ||
1874 videoHeight < newStream->height) {
1875 videoWidth = newStream->width;
1876 videoHeight = newStream->height;
1877 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001878 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1879 (VIDEO_4K_HEIGHT <= newStream->height)) {
1880 m_bIs4KVideo = true;
1881 }
1882 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1883 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001884
Thierry Strudel3d639192016-09-09 11:52:26 -07001885 }
1886 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1887 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1888 switch (newStream->format) {
1889 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001890 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1891 depthPresent = true;
1892 break;
1893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 stallStreamCnt++;
1895 if (isOnEncoder(maxViewfinderSize, newStream->width,
1896 newStream->height)) {
1897 numStreamsOnEncoder++;
1898 bJpegOnEncoder = true;
1899 }
1900 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1901 newStream->width);
1902 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1903 newStream->height);
1904 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1905 "FATAL: max_downscale_factor cannot be zero and so assert");
1906 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1907 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1908 LOGH("Setting small jpeg size flag to true");
1909 bSmallJpegSize = true;
1910 }
1911 break;
1912 case HAL_PIXEL_FORMAT_RAW10:
1913 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1914 case HAL_PIXEL_FORMAT_RAW16:
1915 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001916 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1917 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1918 pdStatCount++;
1919 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001920 break;
1921 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1922 processedStreamCnt++;
1923 if (isOnEncoder(maxViewfinderSize, newStream->width,
1924 newStream->height)) {
1925 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1926 !IS_USAGE_ZSL(newStream->usage)) {
1927 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1928 }
1929 numStreamsOnEncoder++;
1930 }
1931 break;
1932 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1933 processedStreamCnt++;
1934 if (isOnEncoder(maxViewfinderSize, newStream->width,
1935 newStream->height)) {
1936 // If Yuv888 size is not greater than 4K, set feature mask
1937 // to SUPERSET so that it support concurrent request on
1938 // YUV and JPEG.
1939 if (newStream->width <= VIDEO_4K_WIDTH &&
1940 newStream->height <= VIDEO_4K_HEIGHT) {
1941 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1942 }
1943 numStreamsOnEncoder++;
1944 numYuv888OnEncoder++;
1945 largeYuv888Size.width = newStream->width;
1946 largeYuv888Size.height = newStream->height;
1947 }
1948 break;
1949 default:
1950 processedStreamCnt++;
1951 if (isOnEncoder(maxViewfinderSize, newStream->width,
1952 newStream->height)) {
1953 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1954 numStreamsOnEncoder++;
1955 }
1956 break;
1957 }
1958
1959 }
1960 }
1961
1962 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1963 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1964 !m_bIsVideo) {
1965 m_bEisEnable = false;
1966 }
1967
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001968 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1969 pthread_mutex_unlock(&mMutex);
1970 return -EINVAL;
1971 }
1972
Thierry Strudel54dc9782017-02-15 12:12:10 -08001973 uint8_t forceEnableTnr = 0;
1974 char tnr_prop[PROPERTY_VALUE_MAX];
1975 memset(tnr_prop, 0, sizeof(tnr_prop));
1976 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1977 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1978
Thierry Strudel3d639192016-09-09 11:52:26 -07001979 /* Logic to enable/disable TNR based on specific config size/etc.*/
1980 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1981 ((videoWidth == 1920 && videoHeight == 1080) ||
1982 (videoWidth == 1280 && videoHeight == 720)) &&
1983 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1984 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001985 else if (forceEnableTnr)
1986 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001987
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001988 char videoHdrProp[PROPERTY_VALUE_MAX];
1989 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1990 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1991 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1992
1993 if (hdr_mode_prop == 1 && m_bIsVideo &&
1994 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1995 m_bVideoHdrEnabled = true;
1996 else
1997 m_bVideoHdrEnabled = false;
1998
1999
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 /* Check if num_streams is sane */
2001 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2002 rawStreamCnt > MAX_RAW_STREAMS ||
2003 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2004 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2005 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2006 pthread_mutex_unlock(&mMutex);
2007 return -EINVAL;
2008 }
2009 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002010 if (isZsl && m_bIs4KVideo) {
2011 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 pthread_mutex_unlock(&mMutex);
2013 return -EINVAL;
2014 }
2015 /* Check if stream sizes are sane */
2016 if (numStreamsOnEncoder > 2) {
2017 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2018 pthread_mutex_unlock(&mMutex);
2019 return -EINVAL;
2020 } else if (1 < numStreamsOnEncoder){
2021 bUseCommonFeatureMask = true;
2022 LOGH("Multiple streams above max viewfinder size, common mask needed");
2023 }
2024
2025 /* Check if BLOB size is greater than 4k in 4k recording case */
2026 if (m_bIs4KVideo && bJpegExceeds4K) {
2027 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2028 pthread_mutex_unlock(&mMutex);
2029 return -EINVAL;
2030 }
2031
Emilian Peev7650c122017-01-19 08:24:33 -08002032 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2033 depthPresent) {
2034 LOGE("HAL doesn't support depth streams in HFR mode!");
2035 pthread_mutex_unlock(&mMutex);
2036 return -EINVAL;
2037 }
2038
Thierry Strudel3d639192016-09-09 11:52:26 -07002039 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2040 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2041 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2042 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2043 // configurations:
2044 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2045 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2046 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2047 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2048 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2049 __func__);
2050 pthread_mutex_unlock(&mMutex);
2051 return -EINVAL;
2052 }
2053
2054 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2055 // the YUV stream's size is greater or equal to the JPEG size, set common
2056 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2057 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2058 jpegSize.width, jpegSize.height) &&
2059 largeYuv888Size.width > jpegSize.width &&
2060 largeYuv888Size.height > jpegSize.height) {
2061 bYuv888OverrideJpeg = true;
2062 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2063 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2064 }
2065
2066 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2067 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2068 commonFeatureMask);
2069 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2070 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2071
2072 rc = validateStreamDimensions(streamList);
2073 if (rc == NO_ERROR) {
2074 rc = validateStreamRotations(streamList);
2075 }
2076 if (rc != NO_ERROR) {
2077 LOGE("Invalid stream configuration requested!");
2078 pthread_mutex_unlock(&mMutex);
2079 return rc;
2080 }
2081
Emilian Peev0f3c3162017-03-15 12:57:46 +00002082 if (1 < pdStatCount) {
2083 LOGE("HAL doesn't support multiple PD streams");
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087
2088 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2089 (1 == pdStatCount)) {
2090 LOGE("HAL doesn't support PD streams in HFR mode!");
2091 pthread_mutex_unlock(&mMutex);
2092 return -EINVAL;
2093 }
2094
Thierry Strudel3d639192016-09-09 11:52:26 -07002095 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2096 for (size_t i = 0; i < streamList->num_streams; i++) {
2097 camera3_stream_t *newStream = streamList->streams[i];
2098 LOGH("newStream type = %d, stream format = %d "
2099 "stream size : %d x %d, stream rotation = %d",
2100 newStream->stream_type, newStream->format,
2101 newStream->width, newStream->height, newStream->rotation);
2102 //if the stream is already in mStreamInfo, validate it
2103 bool stream_exists = false;
2104 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2105 it != mStreamInfo.end(); it++) {
2106 if ((*it)->stream == newStream) {
2107 QCamera3ProcessingChannel *channel =
2108 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2109 stream_exists = true;
2110 if (channel)
2111 delete channel;
2112 (*it)->status = VALID;
2113 (*it)->stream->priv = NULL;
2114 (*it)->channel = NULL;
2115 }
2116 }
2117 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2118 //new stream
2119 stream_info_t* stream_info;
2120 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2121 if (!stream_info) {
2122 LOGE("Could not allocate stream info");
2123 rc = -ENOMEM;
2124 pthread_mutex_unlock(&mMutex);
2125 return rc;
2126 }
2127 stream_info->stream = newStream;
2128 stream_info->status = VALID;
2129 stream_info->channel = NULL;
2130 mStreamInfo.push_back(stream_info);
2131 }
2132 /* Covers Opaque ZSL and API1 F/W ZSL */
2133 if (IS_USAGE_ZSL(newStream->usage)
2134 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2135 if (zslStream != NULL) {
2136 LOGE("Multiple input/reprocess streams requested!");
2137 pthread_mutex_unlock(&mMutex);
2138 return BAD_VALUE;
2139 }
2140 zslStream = newStream;
2141 }
2142 /* Covers YUV reprocess */
2143 if (inputStream != NULL) {
2144 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2145 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2146 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2147 && inputStream->width == newStream->width
2148 && inputStream->height == newStream->height) {
2149 if (zslStream != NULL) {
2150 /* This scenario indicates multiple YUV streams with the same size
2151 * as the input stream have been requested. Since the zsl stream handle
2152 * is solely used for the purpose of overriding the size of streams
2153 * which share h/w streams, we will just make a guess here as to
2154 * which of the streams is the ZSL stream. This will be refactored
2155 * once we add generic logic for streams sharing encoder output
2156 */
2157 LOGH("Warning, Multiple ip/reprocess streams requested!");
2158 }
2159 zslStream = newStream;
2160 }
2161 }
2162 }
2163
2164 /* If a zsl stream is set, we know that we have configured at least one input or
2165 bidirectional stream */
2166 if (NULL != zslStream) {
2167 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2168 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2169 mInputStreamInfo.format = zslStream->format;
2170 mInputStreamInfo.usage = zslStream->usage;
2171 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2172 mInputStreamInfo.dim.width,
2173 mInputStreamInfo.dim.height,
2174 mInputStreamInfo.format, mInputStreamInfo.usage);
2175 }
2176
2177 cleanAndSortStreamInfo();
2178 if (mMetadataChannel) {
2179 delete mMetadataChannel;
2180 mMetadataChannel = NULL;
2181 }
2182 if (mSupportChannel) {
2183 delete mSupportChannel;
2184 mSupportChannel = NULL;
2185 }
2186
2187 if (mAnalysisChannel) {
2188 delete mAnalysisChannel;
2189 mAnalysisChannel = NULL;
2190 }
2191
2192 if (mDummyBatchChannel) {
2193 delete mDummyBatchChannel;
2194 mDummyBatchChannel = NULL;
2195 }
2196
Emilian Peev7650c122017-01-19 08:24:33 -08002197 if (mDepthChannel) {
2198 mDepthChannel = NULL;
2199 }
2200
Thierry Strudel2896d122017-02-23 19:18:03 -08002201 char is_type_value[PROPERTY_VALUE_MAX];
2202 property_get("persist.camera.is_type", is_type_value, "4");
2203 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2204
Thierry Strudel3d639192016-09-09 11:52:26 -07002205 //Create metadata channel and initialize it
2206 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2207 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2208 gCamCapability[mCameraId]->color_arrangement);
2209 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2210 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002211 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002212 if (mMetadataChannel == NULL) {
2213 LOGE("failed to allocate metadata channel");
2214 rc = -ENOMEM;
2215 pthread_mutex_unlock(&mMutex);
2216 return rc;
2217 }
2218 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2219 if (rc < 0) {
2220 LOGE("metadata channel initialization failed");
2221 delete mMetadataChannel;
2222 mMetadataChannel = NULL;
2223 pthread_mutex_unlock(&mMutex);
2224 return rc;
2225 }
2226
Thierry Strudel2896d122017-02-23 19:18:03 -08002227 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002228 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002229 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002230 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2231 /* Allocate channel objects for the requested streams */
2232 for (size_t i = 0; i < streamList->num_streams; i++) {
2233 camera3_stream_t *newStream = streamList->streams[i];
2234 uint32_t stream_usage = newStream->usage;
2235 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2236 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2237 struct camera_info *p_info = NULL;
2238 pthread_mutex_lock(&gCamLock);
2239 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2240 pthread_mutex_unlock(&gCamLock);
2241 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2242 || IS_USAGE_ZSL(newStream->usage)) &&
2243 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002244 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2247 if (bUseCommonFeatureMask)
2248 zsl_ppmask = commonFeatureMask;
2249 else
2250 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002251 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002252 if (numStreamsOnEncoder > 0)
2253 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2254 else
2255 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002256 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002257 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002259 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002260 LOGH("Input stream configured, reprocess config");
2261 } else {
2262 //for non zsl streams find out the format
2263 switch (newStream->format) {
2264 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2265 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002266 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2268 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2269 /* add additional features to pp feature mask */
2270 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2271 mStreamConfigInfo.num_streams);
2272
2273 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2274 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2275 CAM_STREAM_TYPE_VIDEO;
2276 if (m_bTnrEnabled && m_bTnrVideo) {
2277 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2278 CAM_QCOM_FEATURE_CPP_TNR;
2279 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2280 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2281 ~CAM_QCOM_FEATURE_CDS;
2282 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2284 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2285 CAM_QTI_FEATURE_PPEISCORE;
2286 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002287 } else {
2288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2289 CAM_STREAM_TYPE_PREVIEW;
2290 if (m_bTnrEnabled && m_bTnrPreview) {
2291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2292 CAM_QCOM_FEATURE_CPP_TNR;
2293 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2295 ~CAM_QCOM_FEATURE_CDS;
2296 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002297 if(!m_bSwTnrPreview) {
2298 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2299 ~CAM_QTI_FEATURE_SW_TNR;
2300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002301 padding_info.width_padding = mSurfaceStridePadding;
2302 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002303 previewSize.width = (int32_t)newStream->width;
2304 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 }
2306 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2307 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2308 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2309 newStream->height;
2310 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2311 newStream->width;
2312 }
2313 }
2314 break;
2315 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002316 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002317 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2318 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2319 if (bUseCommonFeatureMask)
2320 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2321 commonFeatureMask;
2322 else
2323 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2324 CAM_QCOM_FEATURE_NONE;
2325 } else {
2326 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2327 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2328 }
2329 break;
2330 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002331 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2333 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2334 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2336 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002337 /* Remove rotation if it is not supported
2338 for 4K LiveVideo snapshot case (online processing) */
2339 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2340 CAM_QCOM_FEATURE_ROTATION)) {
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2342 &= ~CAM_QCOM_FEATURE_ROTATION;
2343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 } else {
2345 if (bUseCommonFeatureMask &&
2346 isOnEncoder(maxViewfinderSize, newStream->width,
2347 newStream->height)) {
2348 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2349 } else {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2351 }
2352 }
2353 if (isZsl) {
2354 if (zslStream) {
2355 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2356 (int32_t)zslStream->width;
2357 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2358 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2360 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 } else {
2362 LOGE("Error, No ZSL stream identified");
2363 pthread_mutex_unlock(&mMutex);
2364 return -EINVAL;
2365 }
2366 } else if (m_bIs4KVideo) {
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2368 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2369 } else if (bYuv888OverrideJpeg) {
2370 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2371 (int32_t)largeYuv888Size.width;
2372 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2373 (int32_t)largeYuv888Size.height;
2374 }
2375 break;
2376 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2377 case HAL_PIXEL_FORMAT_RAW16:
2378 case HAL_PIXEL_FORMAT_RAW10:
2379 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2381 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002382 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2383 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2384 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2385 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2386 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2387 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2388 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2389 gCamCapability[mCameraId]->dt[mPDIndex];
2390 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2391 gCamCapability[mCameraId]->vc[mPDIndex];
2392 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 break;
2394 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002395 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002396 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2397 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2398 break;
2399 }
2400 }
2401
2402 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2403 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2404 gCamCapability[mCameraId]->color_arrangement);
2405
2406 if (newStream->priv == NULL) {
2407 //New stream, construct channel
2408 switch (newStream->stream_type) {
2409 case CAMERA3_STREAM_INPUT:
2410 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2411 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2412 break;
2413 case CAMERA3_STREAM_BIDIRECTIONAL:
2414 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2415 GRALLOC_USAGE_HW_CAMERA_WRITE;
2416 break;
2417 case CAMERA3_STREAM_OUTPUT:
2418 /* For video encoding streams, set read/write rarely
2419 * flags so that the buffers may be allocated un-cached */
2420 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2421 newStream->usage |=
2422 (GRALLOC_USAGE_SW_READ_RARELY |
2423 GRALLOC_USAGE_SW_WRITE_RARELY |
2424 GRALLOC_USAGE_HW_CAMERA_WRITE);
2425 else if (IS_USAGE_ZSL(newStream->usage))
2426 {
2427 LOGD("ZSL usage flag skipping");
2428 }
2429 else if (newStream == zslStream
2430 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2431 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2432 } else
2433 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2434 break;
2435 default:
2436 LOGE("Invalid stream_type %d", newStream->stream_type);
2437 break;
2438 }
2439
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002440 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2442 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2443 QCamera3ProcessingChannel *channel = NULL;
2444 switch (newStream->format) {
2445 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2446 if ((newStream->usage &
2447 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2448 (streamList->operation_mode ==
2449 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2450 ) {
2451 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2452 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002453 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 this,
2455 newStream,
2456 (cam_stream_type_t)
2457 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2459 mMetadataChannel,
2460 0); //heap buffers are not required for HFR video channel
2461 if (channel == NULL) {
2462 LOGE("allocation of channel failed");
2463 pthread_mutex_unlock(&mMutex);
2464 return -ENOMEM;
2465 }
2466 //channel->getNumBuffers() will return 0 here so use
2467 //MAX_INFLIGHT_HFR_REQUESTS
2468 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2469 newStream->priv = channel;
2470 LOGI("num video buffers in HFR mode: %d",
2471 MAX_INFLIGHT_HFR_REQUESTS);
2472 } else {
2473 /* Copy stream contents in HFR preview only case to create
2474 * dummy batch channel so that sensor streaming is in
2475 * HFR mode */
2476 if (!m_bIsVideo && (streamList->operation_mode ==
2477 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2478 mDummyBatchStream = *newStream;
2479 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002480 int bufferCount = MAX_INFLIGHT_REQUESTS;
2481 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2482 CAM_STREAM_TYPE_VIDEO) {
2483 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2484 bufferCount = MAX_VIDEO_BUFFERS;
2485 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002486 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002495 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002496 if (channel == NULL) {
2497 LOGE("allocation of channel failed");
2498 pthread_mutex_unlock(&mMutex);
2499 return -ENOMEM;
2500 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002501 /* disable UBWC for preview, though supported,
2502 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002503 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002504 (previewSize.width == (int32_t)videoWidth)&&
2505 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002506 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002507 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002508 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002509 newStream->max_buffers = channel->getNumBuffers();
2510 newStream->priv = channel;
2511 }
2512 break;
2513 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2514 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2515 mChannelHandle,
2516 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002517 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 this,
2519 newStream,
2520 (cam_stream_type_t)
2521 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2522 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2523 mMetadataChannel);
2524 if (channel == NULL) {
2525 LOGE("allocation of YUV channel failed");
2526 pthread_mutex_unlock(&mMutex);
2527 return -ENOMEM;
2528 }
2529 newStream->max_buffers = channel->getNumBuffers();
2530 newStream->priv = channel;
2531 break;
2532 }
2533 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2534 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002535 case HAL_PIXEL_FORMAT_RAW10: {
2536 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2537 (HAL_DATASPACE_DEPTH != newStream->data_space))
2538 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 mRawChannel = new QCamera3RawChannel(
2540 mCameraHandle->camera_handle, mChannelHandle,
2541 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002542 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 this, newStream,
2544 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002545 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 if (mRawChannel == NULL) {
2547 LOGE("allocation of raw channel failed");
2548 pthread_mutex_unlock(&mMutex);
2549 return -ENOMEM;
2550 }
2551 newStream->max_buffers = mRawChannel->getNumBuffers();
2552 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2553 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002556 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2557 mDepthChannel = new QCamera3DepthChannel(
2558 mCameraHandle->camera_handle, mChannelHandle,
2559 mCameraHandle->ops, NULL, NULL, &padding_info,
2560 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2561 mMetadataChannel);
2562 if (NULL == mDepthChannel) {
2563 LOGE("Allocation of depth channel failed");
2564 pthread_mutex_unlock(&mMutex);
2565 return NO_MEMORY;
2566 }
2567 newStream->priv = mDepthChannel;
2568 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2569 } else {
2570 // Max live snapshot inflight buffer is 1. This is to mitigate
2571 // frame drop issues for video snapshot. The more buffers being
2572 // allocated, the more frame drops there are.
2573 mPictureChannel = new QCamera3PicChannel(
2574 mCameraHandle->camera_handle, mChannelHandle,
2575 mCameraHandle->ops, captureResultCb,
2576 setBufferErrorStatus, &padding_info, this, newStream,
2577 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2578 m_bIs4KVideo, isZsl, mMetadataChannel,
2579 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2580 if (mPictureChannel == NULL) {
2581 LOGE("allocation of channel failed");
2582 pthread_mutex_unlock(&mMutex);
2583 return -ENOMEM;
2584 }
2585 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2586 newStream->max_buffers = mPictureChannel->getNumBuffers();
2587 mPictureChannel->overrideYuvSize(
2588 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2589 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002590 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002591 break;
2592
2593 default:
2594 LOGE("not a supported format 0x%x", newStream->format);
2595 break;
2596 }
2597 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2598 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2599 } else {
2600 LOGE("Error, Unknown stream type");
2601 pthread_mutex_unlock(&mMutex);
2602 return -EINVAL;
2603 }
2604
2605 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002606 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2607 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002608 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002609 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002610 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2611 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2612 }
2613 }
2614
2615 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2616 it != mStreamInfo.end(); it++) {
2617 if ((*it)->stream == newStream) {
2618 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2619 break;
2620 }
2621 }
2622 } else {
2623 // Channel already exists for this stream
2624 // Do nothing for now
2625 }
2626 padding_info = gCamCapability[mCameraId]->padding_info;
2627
Emilian Peev7650c122017-01-19 08:24:33 -08002628 /* Do not add entries for input & depth streams in the metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 * since there is no real stream associated with it
2630 */
Emilian Peev7650c122017-01-19 08:24:33 -08002631 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002632 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2633 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002635 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002636 }
2637
Thierry Strudel2896d122017-02-23 19:18:03 -08002638 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2639 onlyRaw = false;
2640 }
2641
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002642 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002643 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002644 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002645 cam_analysis_info_t analysisInfo;
2646 int32_t ret = NO_ERROR;
2647 ret = mCommon.getAnalysisInfo(
2648 FALSE,
2649 analysisFeatureMask,
2650 &analysisInfo);
2651 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002652 cam_color_filter_arrangement_t analysis_color_arrangement =
2653 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2654 CAM_FILTER_ARRANGEMENT_Y :
2655 gCamCapability[mCameraId]->color_arrangement);
2656 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2657 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002658 cam_dimension_t analysisDim;
2659 analysisDim = mCommon.getMatchingDimension(previewSize,
2660 analysisInfo.analysis_recommended_res);
2661
2662 mAnalysisChannel = new QCamera3SupportChannel(
2663 mCameraHandle->camera_handle,
2664 mChannelHandle,
2665 mCameraHandle->ops,
2666 &analysisInfo.analysis_padding_info,
2667 analysisFeatureMask,
2668 CAM_STREAM_TYPE_ANALYSIS,
2669 &analysisDim,
2670 (analysisInfo.analysis_format
2671 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2672 : CAM_FORMAT_YUV_420_NV21),
2673 analysisInfo.hw_analysis_supported,
2674 gCamCapability[mCameraId]->color_arrangement,
2675 this,
2676 0); // force buffer count to 0
2677 } else {
2678 LOGW("getAnalysisInfo failed, ret = %d", ret);
2679 }
2680 if (!mAnalysisChannel) {
2681 LOGW("Analysis channel cannot be created");
2682 }
2683 }
2684
Thierry Strudel3d639192016-09-09 11:52:26 -07002685 //RAW DUMP channel
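    // Internal debug stream: created only when raw dumping is enabled and the
    // framework did not itself request a RAW stream, sized to the sensor's
    // maximum RAW dimension with no postprocessing features.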
2686 if (mEnableRawDump && isRawStreamRequested == false){
2687 cam_dimension_t rawDumpSize;
2688 rawDumpSize = getMaxRawSize(mCameraId);
2689 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2690 setPAAFSupport(rawDumpFeatureMask,
2691 CAM_STREAM_TYPE_RAW,
2692 gCamCapability[mCameraId]->color_arrangement);
2693 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2694 mChannelHandle,
2695 mCameraHandle->ops,
2696 rawDumpSize,
2697 &padding_info,
2698 this, rawDumpFeatureMask);
2699 if (!mRawDumpChannel) {
2700 LOGE("Raw Dump channel cannot be created");
2701 pthread_mutex_unlock(&mMutex);
2702 return -ENOMEM;
2703 }
2704 }
2705
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 if (mAnalysisChannel) {
2707 cam_analysis_info_t analysisInfo;
2708 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2710 CAM_STREAM_TYPE_ANALYSIS;
2711 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2712 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002713 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002714 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2715 &analysisInfo);
2716 if (rc != NO_ERROR) {
2717 LOGE("getAnalysisInfo failed, ret = %d", rc);
2718 pthread_mutex_unlock(&mMutex);
2719 return rc;
2720 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002721 cam_color_filter_arrangement_t analysis_color_arrangement =
2722 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2723 CAM_FILTER_ARRANGEMENT_Y :
2724 gCamCapability[mCameraId]->color_arrangement);
2725 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2726 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2727 analysis_color_arrangement);
2728
Thierry Strudel3d639192016-09-09 11:52:26 -07002729 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002730 mCommon.getMatchingDimension(previewSize,
2731 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002732 mStreamConfigInfo.num_streams++;
2733 }
2734
Thierry Strudel2896d122017-02-23 19:18:03 -08002735 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002736 cam_analysis_info_t supportInfo;
2737 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2738 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2739 setPAAFSupport(callbackFeatureMask,
2740 CAM_STREAM_TYPE_CALLBACK,
2741 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002742 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002743 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002744 if (ret != NO_ERROR) {
2745 /* Ignore the error for Mono camera
2746 * because the PAAF bit mask is only set
2747 * for CAM_STREAM_TYPE_ANALYSIS stream type
2748 */
2749 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2750 LOGW("getAnalysisInfo failed, ret = %d", ret);
2751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002752 }
2753 mSupportChannel = new QCamera3SupportChannel(
2754 mCameraHandle->camera_handle,
2755 mChannelHandle,
2756 mCameraHandle->ops,
2757 &gCamCapability[mCameraId]->padding_info,
2758 callbackFeatureMask,
2759 CAM_STREAM_TYPE_CALLBACK,
2760 &QCamera3SupportChannel::kDim,
2761 CAM_FORMAT_YUV_420_NV21,
2762 supportInfo.hw_analysis_supported,
2763 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002764 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 if (!mSupportChannel) {
2766 LOGE("dummy channel cannot be created");
2767 pthread_mutex_unlock(&mMutex);
2768 return -ENOMEM;
2769 }
2770 }
2771
2772 if (mSupportChannel) {
2773 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2774 QCamera3SupportChannel::kDim;
2775 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2776 CAM_STREAM_TYPE_CALLBACK;
2777 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2778 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2779 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2780 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2781 gCamCapability[mCameraId]->color_arrangement);
2782 mStreamConfigInfo.num_streams++;
2783 }
2784
2785 if (mRawDumpChannel) {
2786 cam_dimension_t rawSize;
2787 rawSize = getMaxRawSize(mCameraId);
2788 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2789 rawSize;
2790 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2791 CAM_STREAM_TYPE_RAW;
2792 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2793 CAM_QCOM_FEATURE_NONE;
2794 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2795 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2796 gCamCapability[mCameraId]->color_arrangement);
2797 mStreamConfigInfo.num_streams++;
2798 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002799
2800 if (mHdrPlusRawSrcChannel) {
2801 cam_dimension_t rawSize;
2802 rawSize = getMaxRawSize(mCameraId);
2803 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2804 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2805 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2806 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2807 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2808 gCamCapability[mCameraId]->color_arrangement);
2809 mStreamConfigInfo.num_streams++;
2810 }
2811
Thierry Strudel3d639192016-09-09 11:52:26 -07002812 /* In HFR mode, if video stream is not added, create a dummy channel so that
2813 * ISP can create a batch mode even for preview only case. This channel is
2814 * never 'start'ed (no stream-on), it is only 'initialized' */
2815 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2816 !m_bIsVideo) {
2817 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2818 setPAAFSupport(dummyFeatureMask,
2819 CAM_STREAM_TYPE_VIDEO,
2820 gCamCapability[mCameraId]->color_arrangement);
2821 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002824 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002825 this,
2826 &mDummyBatchStream,
2827 CAM_STREAM_TYPE_VIDEO,
2828 dummyFeatureMask,
2829 mMetadataChannel);
2830 if (NULL == mDummyBatchChannel) {
2831 LOGE("creation of mDummyBatchChannel failed. "
2832 "Preview will use non-hfr sensor mode ");
2833 }
2834 }
2835 if (mDummyBatchChannel) {
2836 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2837 mDummyBatchStream.width;
2838 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2839 mDummyBatchStream.height;
2840 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2841 CAM_STREAM_TYPE_VIDEO;
2842 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2843 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2844 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2845 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2846 gCamCapability[mCameraId]->color_arrangement);
2847 mStreamConfigInfo.num_streams++;
2848 }
2849
2850 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2851 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002852 m_bIs4KVideo ? 0 :
2853 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
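    // Buffer budget as coded above: 0 for 4K video, MAX_VIDEO_BUFFERS when the
    // EIS3 property is enabled, MAX_INFLIGHT_REQUESTS otherwise; presumably 0
    // defers to the per-channel defaults and EIS3 needs the extra headroom.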
Thierry Strudel3d639192016-09-09 11:52:26 -07002854
2855 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2856 for (pendingRequestIterator i = mPendingRequestsList.begin();
2857 i != mPendingRequestsList.end();) {
2858 i = erasePendingRequest(i);
2859 }
2860 mPendingFrameDropList.clear();
2861 // Initialize/Reset the pending buffers list
2862 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2863 req.mPendingBufferList.clear();
2864 }
2865 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2866
Thierry Strudel3d639192016-09-09 11:52:26 -07002867 mCurJpegMeta.clear();
2868 //Get min frame duration for this streams configuration
2869 deriveMinFrameDuration();
2870
Chien-Yu Chenee335912017-02-09 17:53:20 -08002871 mFirstPreviewIntentSeen = false;
2872
2873 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002874 {
2875 Mutex::Autolock l(gHdrPlusClientLock);
2876 disableHdrPlusModeLocked();
2877 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002878
Thierry Strudel3d639192016-09-09 11:52:26 -07002879 // Update state
2880 mState = CONFIGURED;
2881
2882 pthread_mutex_unlock(&mMutex);
2883
2884 return rc;
2885}
2886
2887/*===========================================================================
2888 * FUNCTION : validateCaptureRequest
2889 *
2890 * DESCRIPTION: validate a capture request from camera service
2891 *
2892 * PARAMETERS :
2893 * @request : request from framework to process
2894 *
2895 * RETURN :
2896 *
2897 *==========================================================================*/
2898int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002899 camera3_capture_request_t *request,
2900 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002901{
2902 ssize_t idx = 0;
2903 const camera3_stream_buffer_t *b;
2904 CameraMetadata meta;
2905
2906 /* Sanity check the request */
2907 if (request == NULL) {
2908 LOGE("NULL capture request");
2909 return BAD_VALUE;
2910 }
2911
2912 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2913 /*settings cannot be null for the first request*/
2914 return BAD_VALUE;
2915 }
2916
2917 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002918 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2919 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002920 LOGE("Request %d: No output buffers provided!",
2921 frameNumber);
2922 return BAD_VALUE;
2923 }
2924 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2925 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2926 request->num_output_buffers, MAX_NUM_STREAMS);
2927 return BAD_VALUE;
2928 }
2929 if (request->input_buffer != NULL) {
2930 b = request->input_buffer;
2931 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2932 LOGE("Request %d: Buffer %ld: Status not OK!",
2933 frameNumber, (long)idx);
2934 return BAD_VALUE;
2935 }
2936 if (b->release_fence != -1) {
2937 LOGE("Request %d: Buffer %ld: Has a release fence!",
2938 frameNumber, (long)idx);
2939 return BAD_VALUE;
2940 }
2941 if (b->buffer == NULL) {
2942 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2943 frameNumber, (long)idx);
2944 return BAD_VALUE;
2945 }
2946 }
2947
2948 // Validate all buffers
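    // Each output buffer must map to a configured stream (non-NULL channel), be in
    // CAMERA3_BUFFER_STATUS_OK, carry no release fence, and have a non-NULL buffer
    // handle backed by a non-NULL private handle.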
2949 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002950 if (b == NULL) {
2951 return BAD_VALUE;
2952 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002953 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002954 QCamera3ProcessingChannel *channel =
2955 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2956 if (channel == NULL) {
2957 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2958 frameNumber, (long)idx);
2959 return BAD_VALUE;
2960 }
2961 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2962 LOGE("Request %d: Buffer %ld: Status not OK!",
2963 frameNumber, (long)idx);
2964 return BAD_VALUE;
2965 }
2966 if (b->release_fence != -1) {
2967 LOGE("Request %d: Buffer %ld: Has a release fence!",
2968 frameNumber, (long)idx);
2969 return BAD_VALUE;
2970 }
2971 if (b->buffer == NULL) {
2972 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2973 frameNumber, (long)idx);
2974 return BAD_VALUE;
2975 }
2976 if (*(b->buffer) == NULL) {
2977 LOGE("Request %d: Buffer %ld: NULL private handle!",
2978 frameNumber, (long)idx);
2979 return BAD_VALUE;
2980 }
2981 idx++;
2982 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002983 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002984 return NO_ERROR;
2985}
2986
2987/*===========================================================================
2988 * FUNCTION : deriveMinFrameDuration
2989 *
2990 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2991 * on currently configured streams.
2992 *
2993 * PARAMETERS : NONE
2994 *
2995 * RETURN : NONE
2996 *
2997 *==========================================================================*/
2998void QCamera3HardwareInterface::deriveMinFrameDuration()
2999{
3000 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3001
3002 maxJpegDim = 0;
3003 maxProcessedDim = 0;
3004 maxRawDim = 0;
3005
3006 // Figure out maximum jpeg, processed, and raw dimensions
3007 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3008 it != mStreamInfo.end(); it++) {
3009
3010 // Input stream doesn't have valid stream_type
3011 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3012 continue;
3013
3014 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3015 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3016 if (dimension > maxJpegDim)
3017 maxJpegDim = dimension;
3018 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3019 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3020 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3021 if (dimension > maxRawDim)
3022 maxRawDim = dimension;
3023 } else {
3024 if (dimension > maxProcessedDim)
3025 maxProcessedDim = dimension;
3026 }
3027 }
3028
3029 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3030 MAX_SIZES_CNT);
3031
3032 //Assume all jpeg dimensions are in processed dimensions.
3033 if (maxJpegDim > maxProcessedDim)
3034 maxProcessedDim = maxJpegDim;
3035 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3036 if (maxProcessedDim > maxRawDim) {
3037 maxRawDim = INT32_MAX;
3038
3039 for (size_t i = 0; i < count; i++) {
3040 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3041 gCamCapability[mCameraId]->raw_dim[i].height;
3042 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3043 maxRawDim = dimension;
3044 }
3045 }
3046
3047 //Find minimum durations for processed, jpeg, and raw
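    // maxRawDim and maxProcessedDim are pixel counts; match them back against the
    // capability tables to pick the per-size minimum durations that feed
    // mMinRawFrameDuration, mMinProcessedFrameDuration and mMinJpegFrameDuration.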
3048 for (size_t i = 0; i < count; i++) {
3049 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3050 gCamCapability[mCameraId]->raw_dim[i].height) {
3051 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3052 break;
3053 }
3054 }
3055 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3056 for (size_t i = 0; i < count; i++) {
3057 if (maxProcessedDim ==
3058 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3059 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3060 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3061 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3062 break;
3063 }
3064 }
3065}
3066
3067/*===========================================================================
3068 * FUNCTION : getMinFrameDuration
3069 *
3070 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
3071 * DESCRIPTION: get minimum frame duration based on the previously derived minimum
3072 * frame durations and the current request configuration.
3073 *
3074 * PARAMETERS : @request: request sent by the framework
3075 *
3076 * RETURN : min frame duration for a particular request
3077 *==========================================================================*/
3078int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3079{
3080 bool hasJpegStream = false;
3081 bool hasRawStream = false;
3082 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3083 const camera3_stream_t *stream = request->output_buffers[i].stream;
3084 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3085 hasJpegStream = true;
3086 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3087 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3088 stream->format == HAL_PIXEL_FORMAT_RAW16)
3089 hasRawStream = true;
3090 }
3091
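    // The returned floor always covers the raw and processed minimums derived from
    // the configured streams; the JPEG minimum is only folded in when this request
    // contains a BLOB stream (hasRawStream is computed but not consulted here).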
3092 if (!hasJpegStream)
3093 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3094 else
3095 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3096}
3097
3098/*===========================================================================
3099 * FUNCTION : handleBuffersDuringFlushLock
3100 *
3101 * DESCRIPTION: Account for buffers returned from back-end during flush
3102 * This function is executed while mMutex is held by the caller.
3103 *
3104 * PARAMETERS :
3105 * @buffer: image buffer for the callback
3106 *
3107 * RETURN :
3108 *==========================================================================*/
3109void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3110{
3111 bool buffer_found = false;
3112 for (List<PendingBuffersInRequest>::iterator req =
3113 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3114 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3115 for (List<PendingBufferInfo>::iterator i =
3116 req->mPendingBufferList.begin();
3117 i != req->mPendingBufferList.end(); i++) {
3118 if (i->buffer == buffer->buffer) {
3119 mPendingBuffersMap.numPendingBufsAtFlush--;
3120 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3121 buffer->buffer, req->frame_number,
3122 mPendingBuffersMap.numPendingBufsAtFlush);
3123 buffer_found = true;
3124 break;
3125 }
3126 }
3127 if (buffer_found) {
3128 break;
3129 }
3130 }
3131 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3132 //signal the flush()
3133 LOGD("All buffers returned to HAL. Continue flush");
3134 pthread_cond_signal(&mBuffersCond);
3135 }
3136}
3137
Thierry Strudel3d639192016-09-09 11:52:26 -07003138/*===========================================================================
3139 * FUNCTION : handleBatchMetadata
3140 *
3141 * DESCRIPTION: Handles metadata buffer callback in batch mode
3142 *
3143 * PARAMETERS : @metadata_buf: metadata buffer
3144 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3145 * the meta buf in this method
3146 *
3147 * RETURN :
3148 *
3149 *==========================================================================*/
3150void QCamera3HardwareInterface::handleBatchMetadata(
3151 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3152{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003153 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003154
3155 if (NULL == metadata_buf) {
3156 LOGE("metadata_buf is NULL");
3157 return;
3158 }
3159 /* In batch mode, the metadata will contain the frame number and timestamp of
3160 * the last frame in the batch. Eg: a batch containing buffers from request
3161 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3162 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3163 * multiple process_capture_results */
3164 metadata_buffer_t *metadata =
3165 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3166 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3167 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3168 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3169 uint32_t frame_number = 0, urgent_frame_number = 0;
3170 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3171 bool invalid_metadata = false;
3172 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3173 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003174 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003175
3176 int32_t *p_frame_number_valid =
3177 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3178 uint32_t *p_frame_number =
3179 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3180 int64_t *p_capture_time =
3181 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3182 int32_t *p_urgent_frame_number_valid =
3183 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3184 uint32_t *p_urgent_frame_number =
3185 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3186
3187 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3188 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3189 (NULL == p_urgent_frame_number)) {
3190 LOGE("Invalid metadata");
3191 invalid_metadata = true;
3192 } else {
3193 frame_number_valid = *p_frame_number_valid;
3194 last_frame_number = *p_frame_number;
3195 last_frame_capture_time = *p_capture_time;
3196 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3197 last_urgent_frame_number = *p_urgent_frame_number;
3198 }
3199
3200 /* In batch mode, when no video buffers are requested, set_parms are sent
3201 * for every capture_request. The difference between consecutive urgent
3202 * frame numbers and frame numbers should be used to interpolate the
3203 * corresponding frame numbers and time stamps */
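    /* Illustrative numbers: first_frame_number = 5 and last_frame_number = 8 give
     * frameNumDiff = 8 + 1 - 5 = 4, so four per-frame results are synthesized from
     * this single batch metadata in the loop further below. */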
3204 pthread_mutex_lock(&mMutex);
3205 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003206 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3207 if(idx < 0) {
3208 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3209 last_urgent_frame_number);
3210 mState = ERROR;
3211 pthread_mutex_unlock(&mMutex);
3212 return;
3213 }
3214 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003215 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3216 first_urgent_frame_number;
3217
3218 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3219 urgent_frame_number_valid,
3220 first_urgent_frame_number, last_urgent_frame_number);
3221 }
3222
3223 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003224 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3225 if(idx < 0) {
3226 LOGE("Invalid frame number received: %d. Irrecoverable error",
3227 last_frame_number);
3228 mState = ERROR;
3229 pthread_mutex_unlock(&mMutex);
3230 return;
3231 }
3232 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003233 frameNumDiff = last_frame_number + 1 -
3234 first_frame_number;
3235 mPendingBatchMap.removeItem(last_frame_number);
3236
3237 LOGD("frm: valid: %d frm_num: %d - %d",
3238 frame_number_valid,
3239 first_frame_number, last_frame_number);
3240
3241 }
3242 pthread_mutex_unlock(&mMutex);
3243
3244 if (urgent_frame_number_valid || frame_number_valid) {
3245 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3246 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3247 LOGE("urgentFrameNumDiff: %zu urgentFrameNum: %d",
3248 urgentFrameNumDiff, last_urgent_frame_number);
3249 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3250 LOGE("frameNumDiff: %zu frameNum: %d",
3251 frameNumDiff, last_frame_number);
3252 }
3253
3254 for (size_t i = 0; i < loopCount; i++) {
3255 /* handleMetadataWithLock is called even for invalid_metadata for
3256 * pipeline depth calculation */
3257 if (!invalid_metadata) {
3258 /* Infer frame number. Batch metadata contains frame number of the
3259 * last frame */
3260 if (urgent_frame_number_valid) {
3261 if (i < urgentFrameNumDiff) {
3262 urgent_frame_number =
3263 first_urgent_frame_number + i;
3264 LOGD("inferred urgent frame_number: %d",
3265 urgent_frame_number);
3266 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3267 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3268 } else {
3269 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3270 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3271 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3272 }
3273 }
3274
3275 /* Infer frame number. Batch metadata contains frame number of the
3276 * last frame */
3277 if (frame_number_valid) {
3278 if (i < frameNumDiff) {
3279 frame_number = first_frame_number + i;
3280 LOGD("inferred frame_number: %d", frame_number);
3281 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3282 CAM_INTF_META_FRAME_NUMBER, frame_number);
3283 } else {
3284 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3285 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3286 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3287 }
3288 }
3289
3290 if (last_frame_capture_time) {
3291 //Infer timestamp
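                // Illustrative: a 4-frame batch at 120 fps with last timestamp T puts
                // the first frame at T - 3 * NSEC_PER_SEC / 120, and frame i at
                // first_frame_capture_time + i * NSEC_PER_SEC / 120.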
3292 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003293 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003294 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003295 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3297 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3298 LOGD("batch capture_time: %lld, capture_time: %lld",
3299 last_frame_capture_time, capture_time);
3300 }
3301 }
3302 pthread_mutex_lock(&mMutex);
3303 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003304 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003305 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3306 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003307 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003308 pthread_mutex_unlock(&mMutex);
3309 }
3310
3311 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003312 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003313 mMetadataChannel->bufDone(metadata_buf);
3314 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003315 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003316 }
3317}
3318
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003319void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3320 camera3_error_msg_code_t errorCode)
3321{
3322 camera3_notify_msg_t notify_msg;
3323 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3324 notify_msg.type = CAMERA3_MSG_ERROR;
3325 notify_msg.message.error.error_code = errorCode;
3326 notify_msg.message.error.error_stream = NULL;
3327 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003328 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003329
3330 return;
3331}
Thierry Strudel3d639192016-09-09 11:52:26 -07003332/*===========================================================================
3333 * FUNCTION : handleMetadataWithLock
3334 *
3335 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3336 *
3337 * PARAMETERS : @metadata_buf: metadata buffer
3338 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3339 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003340 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3341 * last urgent metadata in a batch. Always true for non-batch mode
3342 * @lastMetadataInBatch: Boolean to indicate whether this is the
3343 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003344 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3345 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003346 *
3347 * RETURN :
3348 *
3349 *==========================================================================*/
3350void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003351 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003352 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3353 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003354{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003355 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003356 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3357 //during flush do not send metadata from this thread
3358 LOGD("not sending metadata during flush or when mState is error");
3359 if (free_and_bufdone_meta_buf) {
3360 mMetadataChannel->bufDone(metadata_buf);
3361 free(metadata_buf);
3362 }
3363 return;
3364 }
3365
3366 //not in flush
3367 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3368 int32_t frame_number_valid, urgent_frame_number_valid;
3369 uint32_t frame_number, urgent_frame_number;
3370 int64_t capture_time;
3371 nsecs_t currentSysTime;
3372
3373 int32_t *p_frame_number_valid =
3374 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3375 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3376 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3377 int32_t *p_urgent_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_urgent_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3381 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3382 metadata) {
3383 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3384 *p_frame_number_valid, *p_frame_number);
3385 }
3386
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 camera_metadata_t *resultMetadata = nullptr;
3388
Thierry Strudel3d639192016-09-09 11:52:26 -07003389 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3390 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 if (free_and_bufdone_meta_buf) {
3393 mMetadataChannel->bufDone(metadata_buf);
3394 free(metadata_buf);
3395 }
3396 goto done_metadata;
3397 }
3398 frame_number_valid = *p_frame_number_valid;
3399 frame_number = *p_frame_number;
3400 capture_time = *p_capture_time;
3401 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3402 urgent_frame_number = *p_urgent_frame_number;
3403 currentSysTime = systemTime(CLOCK_MONOTONIC);
3404
3405 // Detect if buffers from any requests are overdue
3406 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003407 int64_t timeout;
3408 {
3409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3410 // If there is a pending HDR+ request, the following requests may be blocked until the
3411 // HDR+ request is done. So allow a longer timeout.
3412 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3413 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3414 }
3415
3416 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003417 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003418 assert(missed.stream->priv);
3419 if (missed.stream->priv) {
3420 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3421 assert(ch->mStreams[0]);
3422 if (ch->mStreams[0]) {
3423 LOGE("Cancel missing frame = %d, buffer = %p,"
3424 "stream type = %d, stream format = %d",
3425 req.frame_number, missed.buffer,
3426 ch->mStreams[0]->getMyType(), missed.stream->format);
3427 ch->timeoutFrame(req.frame_number);
3428 }
3429 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003430 }
3431 }
3432 }
3433 //Partial result on process_capture_result for timestamp
3434 if (urgent_frame_number_valid) {
3435 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3436 urgent_frame_number, capture_time);
3437
3438 //Received an urgent frame number, handle it
3439 //using partial results
3440 for (pendingRequestIterator i =
3441 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3442 LOGD("Iterator Frame = %d urgent frame = %d",
3443 i->frame_number, urgent_frame_number);
3444
3445 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3446 (i->partial_result_cnt == 0)) {
3447 LOGE("Error: HAL missed urgent metadata for frame number %d",
3448 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003449 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003450 }
3451
3452 if (i->frame_number == urgent_frame_number &&
3453 i->bUrgentReceived == 0) {
3454
3455 camera3_capture_result_t result;
3456 memset(&result, 0, sizeof(camera3_capture_result_t));
3457
3458 i->partial_result_cnt++;
3459 i->bUrgentReceived = 1;
3460 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003461 result.result = translateCbUrgentMetadataToResultMetadata(
3462 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003463 // Populate metadata result
3464 result.frame_number = urgent_frame_number;
3465 result.num_output_buffers = 0;
3466 result.output_buffers = NULL;
3467 result.partial_result = i->partial_result_cnt;
3468
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003469 {
3470 Mutex::Autolock l(gHdrPlusClientLock);
3471 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3472 // Notify HDR+ client about the partial metadata.
3473 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3474 result.partial_result == PARTIAL_RESULT_COUNT);
3475 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003476 }
3477
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003478 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 LOGD("urgent frame_number = %u, capture_time = %lld",
3480 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003481 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3482 // Instant AEC settled for this frame.
3483 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3484 mInstantAECSettledFrameNumber = urgent_frame_number;
3485 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003486 free_camera_metadata((camera_metadata_t *)result.result);
3487 break;
3488 }
3489 }
3490 }
3491
3492 if (!frame_number_valid) {
3493 LOGD("Not a valid normal frame number, used as SOF only");
3494 if (free_and_bufdone_meta_buf) {
3495 mMetadataChannel->bufDone(metadata_buf);
3496 free(metadata_buf);
3497 }
3498 goto done_metadata;
3499 }
3500 LOGH("valid frame_number = %u, capture_time = %lld",
3501 frame_number, capture_time);
3502
Emilian Peev7650c122017-01-19 08:24:33 -08003503 if (metadata->is_depth_data_valid) {
3504 handleDepthDataLocked(metadata->depth_data, frame_number);
3505 }
3506
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003507 // Check whether any stream buffer corresponding to this is dropped or not
3508 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3509 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3510 for (auto & pendingRequest : mPendingRequestsList) {
3511 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3512 mInstantAECSettledFrameNumber)) {
3513 camera3_notify_msg_t notify_msg = {};
3514 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003515 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003516 QCamera3ProcessingChannel *channel =
3517 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003518 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003519 if (p_cam_frame_drop) {
3520 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003521 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003522 // Got the stream ID for drop frame.
3523 dropFrame = true;
3524 break;
3525 }
3526 }
3527 } else {
3528 // This is instant AEC case.
3529 // For instant AEC, drop the stream until AEC is settled.
3530 dropFrame = true;
3531 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003532
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003533 if (dropFrame) {
3534 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3535 if (p_cam_frame_drop) {
3536 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003537 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003538 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003539 } else {
3540 // For instant AEC, inform frame drop and frame number
3541 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3542 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003543 pendingRequest.frame_number, streamID,
3544 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003545 }
3546 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003549 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003550 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003551 if (p_cam_frame_drop) {
3552 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003553 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003554 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003555 } else {
3556 // For instant AEC, inform frame drop and frame number
3557 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3558 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003559 pendingRequest.frame_number, streamID,
3560 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003561 }
3562 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003564 PendingFrameDrop.stream_ID = streamID;
3565 // Add the Frame drop info to mPendingFrameDropList
3566 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003567 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003568 }
3569 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003571
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003572 for (auto & pendingRequest : mPendingRequestsList) {
3573 // Find the pending request with the frame number.
3574 if (pendingRequest.frame_number == frame_number) {
3575 // Update the sensor timestamp.
3576 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003577
Thierry Strudel3d639192016-09-09 11:52:26 -07003578
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003579 /* Set the timestamp in display metadata so that clients aware of
3580 private_handle such as VT can use this unmodified timestamp.
3581 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003583
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 // Find channel requiring metadata, meaning internal offline postprocess
3585 // is needed.
3586 //TODO: for now, we don't support two streams requiring metadata at the same time.
3587 // (because we are not making copies, and metadata buffer is not reference counted.
3588 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003589 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3590 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003591 if (iter->need_metadata) {
3592 internalPproc = true;
3593 QCamera3ProcessingChannel *channel =
3594 (QCamera3ProcessingChannel *)iter->stream->priv;
3595 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003596 if(p_is_metabuf_queued != NULL) {
3597 *p_is_metabuf_queued = true;
3598 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003599 break;
3600 }
3601 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 for (auto itr = pendingRequest.internalRequestList.begin();
3603 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003604 if (itr->need_metadata) {
3605 internalPproc = true;
3606 QCamera3ProcessingChannel *channel =
3607 (QCamera3ProcessingChannel *)itr->stream->priv;
3608 channel->queueReprocMetadata(metadata_buf);
3609 break;
3610 }
3611 }
3612
Thierry Strudel54dc9782017-02-15 12:12:10 -08003613 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003614 resultMetadata = translateFromHalMetadata(metadata,
3615 pendingRequest.timestamp, pendingRequest.request_id,
3616 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3617 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003618 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003619 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003620 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003621 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003622 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003623 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003624
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003625 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003626
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003627 if (pendingRequest.blob_request) {
3628 //Dump tuning metadata if enabled and available
3629 char prop[PROPERTY_VALUE_MAX];
3630 memset(prop, 0, sizeof(prop));
3631 property_get("persist.camera.dumpmetadata", prop, "0");
3632 int32_t enabled = atoi(prop);
3633 if (enabled && metadata->is_tuning_params_valid) {
3634 dumpMetadataToFile(metadata->tuning_params,
3635 mMetaFrameCount,
3636 enabled,
3637 "Snapshot",
3638 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003639 }
3640 }
3641
3642 if (!internalPproc) {
3643 LOGD("couldn't find need_metadata for this metadata");
3644 // Return metadata buffer
3645 if (free_and_bufdone_meta_buf) {
3646 mMetadataChannel->bufDone(metadata_buf);
3647 free(metadata_buf);
3648 }
3649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003650
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003651 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003652 }
3653 }
3654
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655 // Try to send out shutter callbacks and capture results.
3656 handlePendingResultsWithLock(frame_number, resultMetadata);
3657 return;
3658
Thierry Strudel3d639192016-09-09 11:52:26 -07003659done_metadata:
3660 for (pendingRequestIterator i = mPendingRequestsList.begin();
3661 i != mPendingRequestsList.end() ;i++) {
3662 i->pipeline_depth++;
3663 }
3664 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3665 unblockRequestIfNecessary();
3666}
3667
3668/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003669 * FUNCTION : handleDepthDataLocked
3670 *
3671 * DESCRIPTION: Handles incoming depth data
3672 *
3673 * PARAMETERS : @depthData : Depth data
3674 * @frameNumber: Frame number of the incoming depth data
3675 *
3676 * RETURN :
3677 *
3678 *==========================================================================*/
3679void QCamera3HardwareInterface::handleDepthDataLocked(
3680 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3681 uint32_t currentFrameNumber;
3682 buffer_handle_t *depthBuffer;
3683
3684 if (nullptr == mDepthChannel) {
3685 LOGE("Depth channel not present!");
3686 return;
3687 }
3688
3689 camera3_stream_buffer_t resultBuffer =
3690 {.acquire_fence = -1,
3691 .release_fence = -1,
3692 .status = CAMERA3_BUFFER_STATUS_OK,
3693 .buffer = nullptr,
3694 .stream = mDepthChannel->getStream()};
3695 camera3_capture_result_t result =
3696 {.result = nullptr,
3697 .num_output_buffers = 1,
3698 .output_buffers = &resultBuffer,
3699 .partial_result = 0,
3700 .frame_number = 0};
3701
3702 do {
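    // Drain the depth channel queue in order: frames older than the incoming one
    // are returned as buffer errors, the matching frame is populated with the new
    // depth data, and anything newer stays queued for a later callback.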
3703 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3704 if (nullptr == depthBuffer) {
3705 break;
3706 }
3707
3708 result.frame_number = currentFrameNumber;
3709 resultBuffer.buffer = depthBuffer;
3710 if (currentFrameNumber == frameNumber) {
3711 int32_t rc = mDepthChannel->populateDepthData(depthData,
3712 frameNumber);
3713 if (NO_ERROR != rc) {
3714 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3715 } else {
3716 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3717 }
3718 } else if (currentFrameNumber > frameNumber) {
3719 break;
3720 } else {
3721 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3722 {{currentFrameNumber, mDepthChannel->getStream(),
3723 CAMERA3_MSG_ERROR_BUFFER}}};
3724 orchestrateNotify(&notify_msg);
3725
3726 LOGE("Depth buffer for frame number: %d is missing, "
3727 "returning buffer error!", currentFrameNumber);
3728 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3729 }
3730 mDepthChannel->unmapBuffer(currentFrameNumber);
3731
3732 orchestrateResult(&result);
3733 } while (currentFrameNumber < frameNumber);
3734}
3735
3736/*===========================================================================
3737 * FUNCTION : notifyErrorFoPendingDepthData
3738 *
3739 * DESCRIPTION: Returns error for any pending depth buffers
3740 *
3741 * PARAMETERS : depthCh - depth channel that needs to get flushed
3742 *
3743 * RETURN :
3744 *
3745 *==========================================================================*/
3746void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3747 QCamera3DepthChannel *depthCh) {
3748 uint32_t currentFrameNumber;
3749 buffer_handle_t *depthBuffer;
3750
3751 if (nullptr == depthCh) {
3752 return;
3753 }
3754
3755 camera3_notify_msg_t notify_msg =
3756 {.type = CAMERA3_MSG_ERROR,
3757 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3758 camera3_stream_buffer_t resultBuffer =
3759 {.acquire_fence = -1,
3760 .release_fence = -1,
3761 .buffer = nullptr,
3762 .stream = depthCh->getStream(),
3763 .status = CAMERA3_BUFFER_STATUS_ERROR};
3764 camera3_capture_result_t result =
3765 {.result = nullptr,
3766 .frame_number = 0,
3767 .num_output_buffers = 1,
3768 .partial_result = 0,
3769 .output_buffers = &resultBuffer};
3770
3771 while (nullptr !=
3772 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3773 depthCh->unmapBuffer(currentFrameNumber);
3774
3775 notify_msg.message.error.frame_number = currentFrameNumber;
3776 orchestrateNotify(&notify_msg);
3777
3778 resultBuffer.buffer = depthBuffer;
3779 result.frame_number = currentFrameNumber;
3780 orchestrateResult(&result);
3781 };
3782}
3783
3784/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003785 * FUNCTION : hdrPlusPerfLock
3786 *
3787 * DESCRIPTION: perf lock for HDR+ using custom intent
3788 *
3789 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3790 *
3791 * RETURN : None
3792 *
3793 *==========================================================================*/
3794void QCamera3HardwareInterface::hdrPlusPerfLock(
3795 mm_camera_super_buf_t *metadata_buf)
3796{
3797 if (NULL == metadata_buf) {
3798 LOGE("metadata_buf is NULL");
3799 return;
3800 }
3801 metadata_buffer_t *metadata =
3802 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3803 int32_t *p_frame_number_valid =
3804 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3805 uint32_t *p_frame_number =
3806 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3807
3808 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3809 LOGE("%s: Invalid metadata", __func__);
3810 return;
3811 }
3812
3813 //acquire perf lock for 5 sec after the last HDR frame is captured
3814 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3815 if ((p_frame_number != NULL) &&
3816 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003817 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 }
3819 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003820}
3821
3822/*===========================================================================
3823 * FUNCTION : handleInputBufferWithLock
3824 *
3825 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3826 *
3827 * PARAMETERS : @frame_number: frame number of the input buffer
3828 *
3829 * RETURN :
3830 *
3831 *==========================================================================*/
3832void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3833{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003834 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003835 pendingRequestIterator i = mPendingRequestsList.begin();
3836 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3837 i++;
3838 }
3839 if (i != mPendingRequestsList.end() && i->input_buffer) {
3840 //found the right request
3841 if (!i->shutter_notified) {
3842 CameraMetadata settings;
3843 camera3_notify_msg_t notify_msg;
3844 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3845 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3846 if(i->settings) {
3847 settings = i->settings;
3848 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3849 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3850 } else {
3851 LOGE("No timestamp in input settings! Using current one.");
3852 }
3853 } else {
3854 LOGE("Input settings missing!");
3855 }
3856
3857 notify_msg.type = CAMERA3_MSG_SHUTTER;
3858 notify_msg.message.shutter.frame_number = frame_number;
3859 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003860 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003861 i->shutter_notified = true;
3862 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3863 i->frame_number, notify_msg.message.shutter.timestamp);
3864 }
3865
3866 if (i->input_buffer->release_fence != -1) {
3867 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3868 close(i->input_buffer->release_fence);
3869 if (rc != OK) {
3870 LOGE("input buffer sync wait failed %d", rc);
3871 }
3872 }
3873
3874 camera3_capture_result result;
3875 memset(&result, 0, sizeof(camera3_capture_result));
3876 result.frame_number = frame_number;
3877 result.result = i->settings;
3878 result.input_buffer = i->input_buffer;
3879 result.partial_result = PARTIAL_RESULT_COUNT;
3880
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003881 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003882 LOGD("Input request metadata and input buffer frame_number = %u",
3883 i->frame_number);
3884 i = erasePendingRequest(i);
3885 } else {
3886 LOGE("Could not find input request for frame number %d", frame_number);
3887 }
3888}
3889
3890/*===========================================================================
3891 * FUNCTION : handleBufferWithLock
3892 *
3893 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3894 *
3895 * PARAMETERS : @buffer: image buffer for the callback
3896 * @frame_number: frame number of the image buffer
3897 *
3898 * RETURN :
3899 *
3900 *==========================================================================*/
3901void QCamera3HardwareInterface::handleBufferWithLock(
3902 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3903{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003904 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003905
3906 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3907 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3908 }
3909
Thierry Strudel3d639192016-09-09 11:52:26 -07003910 /* Nothing to be done during error state */
3911 if ((ERROR == mState) || (DEINIT == mState)) {
3912 return;
3913 }
3914 if (mFlushPerf) {
3915 handleBuffersDuringFlushLock(buffer);
3916 return;
3917 }
3918 //not in flush
3919 // If the frame number doesn't exist in the pending request list,
3920 // directly send the buffer to the frameworks, and update pending buffers map
3921 // Otherwise, book-keep the buffer.
3922 pendingRequestIterator i = mPendingRequestsList.begin();
3923 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3924 i++;
3925 }
3926 if (i == mPendingRequestsList.end()) {
3927 // Verify all pending requests frame_numbers are greater
3928 for (pendingRequestIterator j = mPendingRequestsList.begin();
3929 j != mPendingRequestsList.end(); j++) {
3930 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3931 LOGW("Error: pending live frame number %d is smaller than %d",
3932 j->frame_number, frame_number);
3933 }
3934 }
3935 camera3_capture_result_t result;
3936 memset(&result, 0, sizeof(camera3_capture_result_t));
3937 result.result = NULL;
3938 result.frame_number = frame_number;
3939 result.num_output_buffers = 1;
3940 result.partial_result = 0;
3941 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3942 m != mPendingFrameDropList.end(); m++) {
3943 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3944 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3945 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3946 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3947 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3948 frame_number, streamID);
3949 m = mPendingFrameDropList.erase(m);
3950 break;
3951 }
3952 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003953 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003954 result.output_buffers = buffer;
3955 LOGH("result frame_number = %d, buffer = %p",
3956 frame_number, buffer->buffer);
3957
3958 mPendingBuffersMap.removeBuf(buffer->buffer);
3959
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003960 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003961 } else {
3962 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003963 if (i->input_buffer->release_fence != -1) {
3964 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3965 close(i->input_buffer->release_fence);
3966 if (rc != OK) {
3967 LOGE("input buffer sync wait failed %d", rc);
3968 }
3969 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003970 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003971
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003972 // Put buffer into the pending request
3973 for (auto &requestedBuffer : i->buffers) {
3974 if (requestedBuffer.stream == buffer->stream) {
3975 if (requestedBuffer.buffer != nullptr) {
3976 LOGE("Error: buffer is already set");
3977 } else {
3978 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3979 sizeof(camera3_stream_buffer_t));
3980 *(requestedBuffer.buffer) = *buffer;
3981 LOGH("cache buffer %p at result frame_number %u",
3982 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003983 }
3984 }
3985 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003986
3987 if (i->input_buffer) {
3988 // For a reprocessing request, try to send out shutter callback and result metadata.
3989 handlePendingResultsWithLock(frame_number, nullptr);
3990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003991 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003992
3993 if (mPreviewStarted == false) {
3994 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3995 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07003996 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
3997
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003998 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3999 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4000 mPreviewStarted = true;
4001
4002 // Set power hint for preview
4003 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4004 }
4005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004006}
4007
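/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Attach the result metadata to its pending request and, in
 *              frame number order, send out the shutter callbacks and capture
 *              results of all pending requests that are ready. For live
 *              requests older than a completed live request that still have
 *              no result metadata, a result error is notified instead.
 *              Assumes mMutex is held by the caller.
 *
 * PARAMETERS :
 *   @frameNumber   : frame number whose result metadata is ready
 *   @resultMetadata: result metadata of the request; nullptr for reprocessing
 *                    requests, whose result metadata is taken from the
 *                    request settings
 *
 * RETURN     :
 *
 *==========================================================================*/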
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004008void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4009 const camera_metadata_t *resultMetadata)
4010{
4011 // Find the pending request for this result metadata.
4012 auto requestIter = mPendingRequestsList.begin();
4013 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4014 requestIter++;
4015 }
4016
4017 if (requestIter == mPendingRequestsList.end()) {
4018 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4019 return;
4020 }
4021
4022 // Update the result metadata
4023 requestIter->resultMetadata = resultMetadata;
4024
4025 // Check what type of request this is.
4026 bool liveRequest = false;
4027 if (requestIter->hdrplus) {
4028 // HDR+ request doesn't have partial results.
4029 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4030 } else if (requestIter->input_buffer != nullptr) {
4031 // Reprocessing request result is the same as settings.
4032 requestIter->resultMetadata = requestIter->settings;
4033 // Reprocessing request doesn't have partial results.
4034 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4035 } else {
4036 liveRequest = true;
4037 requestIter->partial_result_cnt++;
4038 mPendingLiveRequest--;
4039
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004040 {
4041 Mutex::Autolock l(gHdrPlusClientLock);
4042 // For a live request, send the metadata to HDR+ client.
4043 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4044 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4045 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4046 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004047 }
4048 }
4049
4050 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4051 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4052 bool readyToSend = true;
4053
4054 // Iterate through the pending requests to send out shutter callbacks and results that are
4055 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4056 // live requests that don't have result metadata yet.
4057 auto iter = mPendingRequestsList.begin();
4058 while (iter != mPendingRequestsList.end()) {
4059 // Check if current pending request is ready. If it's not ready, the following pending
4060 // requests are also not ready.
4061 if (readyToSend && iter->resultMetadata == nullptr) {
4062 readyToSend = false;
4063 }
4064
4065 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4066
4067 std::vector<camera3_stream_buffer_t> outputBuffers;
4068
4069 camera3_capture_result_t result = {};
4070 result.frame_number = iter->frame_number;
4071 result.result = iter->resultMetadata;
4072 result.partial_result = iter->partial_result_cnt;
4073
4074        // If this pending request has result metadata, we may be able to send out its shutter callback
4075 // and result metadata.
4076 if (iter->resultMetadata != nullptr) {
4077 if (!readyToSend) {
4078 // If any of the previous pending request is not ready, this pending request is
4079 // also not ready to send in order to keep shutter callbacks and result metadata
4080 // in order.
4081 iter++;
4082 continue;
4083 }
4084
4085 // Invoke shutter callback if not yet.
4086 if (!iter->shutter_notified) {
4087 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4088
4089                // Find the sensor timestamp in the result metadata
4090 camera_metadata_ro_entry_t entry;
4091 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4092 ANDROID_SENSOR_TIMESTAMP, &entry);
4093 if (res != OK) {
4094 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4095 __FUNCTION__, iter->frame_number, strerror(-res), res);
4096 } else {
4097 timestamp = entry.data.i64[0];
4098 }
4099
4100 camera3_notify_msg_t notify_msg = {};
4101 notify_msg.type = CAMERA3_MSG_SHUTTER;
4102 notify_msg.message.shutter.frame_number = iter->frame_number;
4103 notify_msg.message.shutter.timestamp = timestamp;
4104 orchestrateNotify(&notify_msg);
4105 iter->shutter_notified = true;
4106 }
4107
4108 result.input_buffer = iter->input_buffer;
4109
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004110 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4111 // If the result metadata belongs to a live request, notify errors for previous pending
4112 // live requests.
4113 mPendingLiveRequest--;
4114
4115 CameraMetadata dummyMetadata;
4116 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4117 result.result = dummyMetadata.release();
4118
4119 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004120
4121 // partial_result should be PARTIAL_RESULT_CNT in case of
4122            // partial_result should be PARTIAL_RESULT_COUNT in case of
4123 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4124 result.partial_result = PARTIAL_RESULT_COUNT;
4125
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004126 } else {
4127 iter++;
4128 continue;
4129 }
4130
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004131 // Prepare output buffer array
4132 for (auto bufferInfoIter = iter->buffers.begin();
4133 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4134 if (bufferInfoIter->buffer != nullptr) {
4135
4136 QCamera3Channel *channel =
4137 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4138 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4139
4140 // Check if this buffer is a dropped frame.
4141 auto frameDropIter = mPendingFrameDropList.begin();
4142 while (frameDropIter != mPendingFrameDropList.end()) {
4143 if((frameDropIter->stream_ID == streamID) &&
4144 (frameDropIter->frame_number == frameNumber)) {
4145 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4146 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4147 streamID);
4148 mPendingFrameDropList.erase(frameDropIter);
4149 break;
4150 } else {
4151 frameDropIter++;
4152 }
4153 }
4154
4155 // Check buffer error status
4156 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4157 bufferInfoIter->buffer->buffer);
4158 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4159
4160 outputBuffers.push_back(*(bufferInfoIter->buffer));
4161 free(bufferInfoIter->buffer);
4162 bufferInfoIter->buffer = NULL;
4163 }
4164 }
4165
4166 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4167 result.num_output_buffers = outputBuffers.size();
4168
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004169 orchestrateResult(&result);
4170
4171 // For reprocessing, result metadata is the same as settings so do not free it here to
4172 // avoid double free.
4173 if (result.result != iter->settings) {
4174 free_camera_metadata((camera_metadata_t *)result.result);
4175 }
4176 iter->resultMetadata = nullptr;
4177 iter = erasePendingRequest(iter);
4178 }
4179
4180 if (liveRequest) {
4181 for (auto &iter : mPendingRequestsList) {
4182 // Increment pipeline depth for the following pending requests.
4183 if (iter.frame_number > frameNumber) {
4184 iter.pipeline_depth++;
4185 }
4186 }
4187 }
4188
4189 unblockRequestIfNecessary();
4190}
4191
Thierry Strudel3d639192016-09-09 11:52:26 -07004192/*===========================================================================
4193 * FUNCTION : unblockRequestIfNecessary
4194 *
4195 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4196 * that mMutex is held when this function is called.
4197 *
4198 * PARAMETERS :
4199 *
4200 * RETURN :
4201 *
4202 *==========================================================================*/
4203void QCamera3HardwareInterface::unblockRequestIfNecessary()
4204{
4205 // Unblock process_capture_request
4206 pthread_cond_signal(&mRequestCond);
4207}
4208
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004209/*===========================================================================
4210 * FUNCTION : isHdrSnapshotRequest
4211 *
4212 * DESCRIPTION: Function to determine whether the request is for an HDR snapshot
4213 *
4214 * PARAMETERS : camera3 request structure
4215 *
4216 * RETURN     : true if the request should be treated as an HDR snapshot
4217 *
4218 *==========================================================================*/
4219bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4220{
4221 if (request == NULL) {
4222 LOGE("Invalid request handle");
4223 assert(0);
4224 return false;
4225 }
4226
4227 if (!mForceHdrSnapshot) {
4228 CameraMetadata frame_settings;
4229 frame_settings = request->settings;
4230
4231 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4232 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4233 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4234 return false;
4235 }
4236 } else {
4237 return false;
4238 }
4239
4240 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4241 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4242 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4243 return false;
4244 }
4245 } else {
4246 return false;
4247 }
4248 }
4249
4250 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4251 if (request->output_buffers[i].stream->format
4252 == HAL_PIXEL_FORMAT_BLOB) {
4253 return true;
4254 }
4255 }
4256
4257 return false;
4258}
4259/*===========================================================================
4260 * FUNCTION : orchestrateRequest
4261 *
4262 * DESCRIPTION: Orchestrates a capture request from camera service
4263 *
4264 * PARAMETERS :
4265 * @request : request from framework to process
4266 *
4267 * RETURN : Error status codes
4268 *
4269 *==========================================================================*/
4270int32_t QCamera3HardwareInterface::orchestrateRequest(
4271 camera3_capture_request_t *request)
4272{
4273
4274 uint32_t originalFrameNumber = request->frame_number;
4275 uint32_t originalOutputCount = request->num_output_buffers;
4276 const camera_metadata_t *original_settings = request->settings;
4277 List<InternalRequest> internallyRequestedStreams;
4278 List<InternalRequest> emptyInternalList;
4279
4280 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4281 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
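        /*
         * In outline (a summary of the sequence below): with AE locked, the HDR
         * snapshot is orchestrated as an exposure bracket driven by
         * ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION. At each EV step a
         * metering-only settling request precedes the actual capture. The
         * framework's own output buffers are returned from the capture at
         * GB_HDR_HALF_STEP_EV, which is the only submission mapped back to the
         * original framework frame number; the 0 EV and GB_HDR_2X_STEP_EV
         * captures go to internally requested BLOB streams with need_metadata
         * set. All other submissions are registered against
         * EMPTY_FRAMEWORK_FRAME_NUMBER, so orchestrateResult()/
         * orchestrateNotify() drop their results.
         */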
4282 uint32_t internalFrameNumber;
4283 CameraMetadata modified_meta;
4284
4285
4286 /* Add Blob channel to list of internally requested streams */
4287 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4288 if (request->output_buffers[i].stream->format
4289 == HAL_PIXEL_FORMAT_BLOB) {
4290 InternalRequest streamRequested;
4291 streamRequested.meteringOnly = 1;
4292 streamRequested.need_metadata = 0;
4293 streamRequested.stream = request->output_buffers[i].stream;
4294 internallyRequestedStreams.push_back(streamRequested);
4295 }
4296 }
4297 request->num_output_buffers = 0;
4298 auto itr = internallyRequestedStreams.begin();
4299
4300 /* Modify setting to set compensation */
4301 modified_meta = request->settings;
4302 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4303 uint8_t aeLock = 1;
4304 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4305 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4306 camera_metadata_t *modified_settings = modified_meta.release();
4307 request->settings = modified_settings;
4308
4309 /* Capture Settling & -2x frame */
4310 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4311 request->frame_number = internalFrameNumber;
4312 processCaptureRequest(request, internallyRequestedStreams);
4313
4314 request->num_output_buffers = originalOutputCount;
4315 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4316 request->frame_number = internalFrameNumber;
4317 processCaptureRequest(request, emptyInternalList);
4318 request->num_output_buffers = 0;
4319
4320 modified_meta = modified_settings;
4321 expCompensation = 0;
4322 aeLock = 1;
4323 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4324 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4325 modified_settings = modified_meta.release();
4326 request->settings = modified_settings;
4327
4328 /* Capture Settling & 0X frame */
4329
4330 itr = internallyRequestedStreams.begin();
4331 if (itr == internallyRequestedStreams.end()) {
4332 LOGE("Error Internally Requested Stream list is empty");
4333 assert(0);
4334 } else {
4335 itr->need_metadata = 0;
4336 itr->meteringOnly = 1;
4337 }
4338
4339 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4340 request->frame_number = internalFrameNumber;
4341 processCaptureRequest(request, internallyRequestedStreams);
4342
4343 itr = internallyRequestedStreams.begin();
4344 if (itr == internallyRequestedStreams.end()) {
4345 ALOGE("Error Internally Requested Stream list is empty");
4346 assert(0);
4347 } else {
4348 itr->need_metadata = 1;
4349 itr->meteringOnly = 0;
4350 }
4351
4352 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4353 request->frame_number = internalFrameNumber;
4354 processCaptureRequest(request, internallyRequestedStreams);
4355
4356 /* Capture 2X frame*/
4357 modified_meta = modified_settings;
4358 expCompensation = GB_HDR_2X_STEP_EV;
4359 aeLock = 1;
4360 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4361 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4362 modified_settings = modified_meta.release();
4363 request->settings = modified_settings;
4364
4365 itr = internallyRequestedStreams.begin();
4366 if (itr == internallyRequestedStreams.end()) {
4367 ALOGE("Error Internally Requested Stream list is empty");
4368 assert(0);
4369 } else {
4370 itr->need_metadata = 0;
4371 itr->meteringOnly = 1;
4372 }
4373 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4374 request->frame_number = internalFrameNumber;
4375 processCaptureRequest(request, internallyRequestedStreams);
4376
4377 itr = internallyRequestedStreams.begin();
4378 if (itr == internallyRequestedStreams.end()) {
4379 ALOGE("Error Internally Requested Stream list is empty");
4380 assert(0);
4381 } else {
4382 itr->need_metadata = 1;
4383 itr->meteringOnly = 0;
4384 }
4385
4386 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 processCaptureRequest(request, internallyRequestedStreams);
4389
4390
4391 /* Capture 2X on original streaming config*/
4392 internallyRequestedStreams.clear();
4393
4394 /* Restore original settings pointer */
4395 request->settings = original_settings;
4396 } else {
4397 uint32_t internalFrameNumber;
4398 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4399 request->frame_number = internalFrameNumber;
4400 return processCaptureRequest(request, internallyRequestedStreams);
4401 }
4402
4403 return NO_ERROR;
4404}
4405
4406/*===========================================================================
4407 * FUNCTION : orchestrateResult
4408 *
4409 * DESCRIPTION: Orchestrates a capture result to camera service
4410 *
4411 * PARAMETERS :
4412 * @result : capture result to be sent to the framework
4413 *
4414 * RETURN :
4415 *
4416 *==========================================================================*/
4417void QCamera3HardwareInterface::orchestrateResult(
4418 camera3_capture_result_t *result)
4419{
4420 uint32_t frameworkFrameNumber;
4421 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4422 frameworkFrameNumber);
4423 if (rc != NO_ERROR) {
4424 LOGE("Cannot find translated frameworkFrameNumber");
4425 assert(0);
4426 } else {
4427 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004428 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004429 } else {
4430 result->frame_number = frameworkFrameNumber;
4431 mCallbackOps->process_capture_result(mCallbackOps, result);
4432 }
4433 }
4434}
4435
4436/*===========================================================================
4437 * FUNCTION : orchestrateNotify
4438 *
4439 * DESCRIPTION: Orchestrates a notify to camera service
4440 *
4441 * PARAMETERS :
4442 * @notify_msg : notify message to be sent to the framework
4443 *
4444 * RETURN :
4445 *
4446 *==========================================================================*/
4447void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4448{
4449 uint32_t frameworkFrameNumber;
4450 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004451 int32_t rc = NO_ERROR;
4452
4453 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004454 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004455
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004456 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004457 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4458 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4459 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004460 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004461 LOGE("Cannot find translated frameworkFrameNumber");
4462 assert(0);
4463 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004464 }
4465 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004466
4467 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4468 LOGD("Internal Request drop the notifyCb");
4469 } else {
4470 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4471 mCallbackOps->notify(mCallbackOps, notify_msg);
4472 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004473}
4474
4475/*===========================================================================
4476 * FUNCTION : FrameNumberRegistry
4477 *
4478 * DESCRIPTION: Constructor
4479 *
4480 * PARAMETERS :
4481 *
4482 * RETURN :
4483 *
4484 *==========================================================================*/
4485FrameNumberRegistry::FrameNumberRegistry()
4486{
4487 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4488}
4489
4490/*===========================================================================
4491 * FUNCTION : ~FrameNumberRegistry
4492 *
4493 * DESCRIPTION: Destructor
4494 *
4495 * PARAMETERS :
4496 *
4497 * RETURN :
4498 *
4499 *==========================================================================*/
4500FrameNumberRegistry::~FrameNumberRegistry()
4501{
4502}
4503
4504/*===========================================================================
4505 * FUNCTION : PurgeOldEntriesLocked
4506 *
4507 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4508 *
4509 * PARAMETERS :
4510 *
4511 * RETURN : NONE
4512 *
4513 *==========================================================================*/
4514void FrameNumberRegistry::purgeOldEntriesLocked()
4515{
4516 while (_register.begin() != _register.end()) {
4517 auto itr = _register.begin();
4518 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4519 _register.erase(itr);
4520 } else {
4521 return;
4522 }
4523 }
4524}
4525
4526/*===========================================================================
4527 * FUNCTION : allocStoreInternalFrameNumber
4528 *
4529 * DESCRIPTION: Method to record a framework frame number and associate a
4530 *              newly generated internal frame number with it
4531 *
4532 * PARAMETERS :
4533 * @frameworkFrameNumber: frame number given by the framework
4534 * @internalFrameNumber : output parameter which will hold the newly
4535 *                        generated internal frame number
4536 *
4537 * RETURN : Error code
4538 *
4539 *==========================================================================*/
4540int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4541 uint32_t &internalFrameNumber)
4542{
4543 Mutex::Autolock lock(mRegistryLock);
4544 internalFrameNumber = _nextFreeInternalNumber++;
4545 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4546 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4547 purgeOldEntriesLocked();
4548 return NO_ERROR;
4549}
4550
4551/*===========================================================================
4552 * FUNCTION : generateStoreInternalFrameNumber
4553 *
4554 * DESCRIPTION: Method to generate a new internal frame number that is not
4555 *              associated with any framework request
4556 *
4557 * PARAMETERS :
4558 * @internalFrameNumber: output parameter which will hold the newly
4559 *                       generated internal frame number
4560 *
4561 * RETURN : Error code
4562 *
4563 *==========================================================================*/
4564int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4565{
4566 Mutex::Autolock lock(mRegistryLock);
4567 internalFrameNumber = _nextFreeInternalNumber++;
4568 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4569 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4570 purgeOldEntriesLocked();
4571 return NO_ERROR;
4572}
4573
4574/*===========================================================================
4575 * FUNCTION : getFrameworkFrameNumber
4576 *
4577 * DESCRIPTION: Method to query the framework frame number given an internal one
4578 *
4579 * PARAMETERS :
4580 * @internalFrameNumber : internal frame number to look up
4581 * @frameworkFrameNumber: output parameter holding the framework frame number
4582 *
4583 * RETURN : Error code
4584 *
4585 *==========================================================================*/
4586int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4587 uint32_t &frameworkFrameNumber)
4588{
4589 Mutex::Autolock lock(mRegistryLock);
4590 auto itr = _register.find(internalFrameNumber);
4591 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004592 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004593 return -ENOENT;
4594 }
4595
4596 frameworkFrameNumber = itr->second;
4597 purgeOldEntriesLocked();
4598 return NO_ERROR;
4599}
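/*
 * Usage note: internal frame numbers start at INTERNAL_FRAME_STARTING_NUMBER and
 * grow monotonically. Requests submitted on behalf of a framework request are
 * registered with allocStoreInternalFrameNumber(); purely internal requests
 * (e.g. the metering-only HDR bracket captures) are registered with
 * generateStoreInternalFrameNumber() against EMPTY_FRAMEWORK_FRAME_NUMBER,
 * which lets orchestrateResult()/orchestrateNotify() drop their results.
 * Entries that fall more than FRAME_REGISTER_LRU_SIZE behind the next free
 * internal number are purged on every registry operation.
 */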
Thierry Strudel3d639192016-09-09 11:52:26 -07004600
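/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fill an HDR+ (pbcamera) stream configuration from the stream
 *              info of a QCamera3 channel, including per-plane stride and
 *              scanline and the trailing padding of the frame.
 *
 * PARAMETERS :
 *   @config        : pbcamera stream configuration to fill
 *   @pbStreamId    : pbcamera stream ID to assign
 *   @pbStreamFormat: pbcamera image format to assign
 *   @channel       : channel that owns the stream
 *   @streamIndex   : index of the stream within the channel
 *
 * RETURN     : OK on success
 *              BAD_VALUE if config or channel is null
 *              NAME_NOT_FOUND if the stream or its info cannot be found
 *
 *==========================================================================*/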
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004601status_t QCamera3HardwareInterface::fillPbStreamConfig(
4602 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4603 QCamera3Channel *channel, uint32_t streamIndex) {
4604 if (config == nullptr) {
4605 LOGE("%s: config is null", __FUNCTION__);
4606 return BAD_VALUE;
4607 }
4608
4609 if (channel == nullptr) {
4610 LOGE("%s: channel is null", __FUNCTION__);
4611 return BAD_VALUE;
4612 }
4613
4614 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4615 if (stream == nullptr) {
4616 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4617 return NAME_NOT_FOUND;
4618 }
4619
4620 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4621 if (streamInfo == nullptr) {
4622 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4623 return NAME_NOT_FOUND;
4624 }
4625
4626 config->id = pbStreamId;
4627 config->image.width = streamInfo->dim.width;
4628 config->image.height = streamInfo->dim.height;
4629 config->image.padding = 0;
4630 config->image.format = pbStreamFormat;
4631
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004632 uint32_t totalPlaneSize = 0;
4633
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004634 // Fill plane information.
4635 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4636 pbcamera::PlaneConfiguration plane;
4637 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4638 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4639 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004640
4641 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004642 }
4643
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004644 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004645 return OK;
4646}
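// Illustrative use only (a sketch; the stream ID and pbcamera format values
// below are placeholders, not the ones the HDR+ client actually expects):
//
//     pbcamera::StreamConfiguration rawConfig = {};
//     status_t res = fillPbStreamConfig(&rawConfig, /*pbStreamId*/ 0,
//             /*pbStreamFormat*/ 0, mHdrPlusRawSrcChannel, /*streamIndex*/ 0);
//     if (res == OK) {
//         // rawConfig now carries the stream dimensions, per-plane
//         // stride/scanline, and the trailing padding of the frame.
//     }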
4647
Thierry Strudel3d639192016-09-09 11:52:26 -07004648/*===========================================================================
4649 * FUNCTION : processCaptureRequest
4650 *
4651 * DESCRIPTION: process a capture request from camera service
4652 *
4653 * PARAMETERS :
4654 * @request : request from framework to process
4655 *
4656 * RETURN :
4657 *
4658 *==========================================================================*/
4659int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004660 camera3_capture_request_t *request,
4661 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004662{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004663 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004664 int rc = NO_ERROR;
4665 int32_t request_id;
4666 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004667 bool isVidBufRequested = false;
4668 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004669 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004670
4671 pthread_mutex_lock(&mMutex);
4672
4673 // Validate current state
4674 switch (mState) {
4675 case CONFIGURED:
4676 case STARTED:
4677 /* valid state */
4678 break;
4679
4680 case ERROR:
4681 pthread_mutex_unlock(&mMutex);
4682 handleCameraDeviceError();
4683 return -ENODEV;
4684
4685 default:
4686 LOGE("Invalid state %d", mState);
4687 pthread_mutex_unlock(&mMutex);
4688 return -ENODEV;
4689 }
4690
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004691 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004692 if (rc != NO_ERROR) {
4693 LOGE("incoming request is not valid");
4694 pthread_mutex_unlock(&mMutex);
4695 return rc;
4696 }
4697
4698 meta = request->settings;
4699
4700 // For first capture request, send capture intent, and
4701 // stream on all streams
4702 if (mState == CONFIGURED) {
4703 // send an unconfigure to the backend so that the isp
4704 // resources are deallocated
4705 if (!mFirstConfiguration) {
4706 cam_stream_size_info_t stream_config_info;
4707 int32_t hal_version = CAM_HAL_V3;
4708 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4709 stream_config_info.buffer_info.min_buffers =
4710 MIN_INFLIGHT_REQUESTS;
4711 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004712 m_bIs4KVideo ? 0 :
4713 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004714 clear_metadata_buffer(mParameters);
4715 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4716 CAM_INTF_PARM_HAL_VERSION, hal_version);
4717 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4718 CAM_INTF_META_STREAM_INFO, stream_config_info);
4719 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4720 mParameters);
4721 if (rc < 0) {
4722 LOGE("set_parms for unconfigure failed");
4723 pthread_mutex_unlock(&mMutex);
4724 return rc;
4725 }
4726 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004727 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004728 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004729 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004730 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004731 property_get("persist.camera.is_type", is_type_value, "4");
4732 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4733 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4734 property_get("persist.camera.is_type_preview", is_type_value, "4");
4735 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4736 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004737
4738 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4739 int32_t hal_version = CAM_HAL_V3;
4740 uint8_t captureIntent =
4741 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4742 mCaptureIntent = captureIntent;
4743 clear_metadata_buffer(mParameters);
4744 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4745 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4746 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004747 if (mFirstConfiguration) {
4748 // configure instant AEC
4749 // Instant AEC is a session based parameter and it is needed only
4750 // once per complete session after open camera.
4751 // i.e. This is set only once for the first capture request, after open camera.
4752 setInstantAEC(meta);
4753 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004754 uint8_t fwkVideoStabMode=0;
4755 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4756 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4757 }
4758
4759 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4760 // turn it on for video/preview
4761 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4762 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 int32_t vsMode;
4764 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4766 rc = BAD_VALUE;
4767 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004768 LOGD("setEis %d", setEis);
4769 bool eis3Supported = false;
4770 size_t count = IS_TYPE_MAX;
4771 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4772 for (size_t i = 0; i < count; i++) {
4773 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4774 eis3Supported = true;
4775 break;
4776 }
4777 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004778
4779 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004780 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4782 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4784 is_type = isTypePreview;
4785 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4786 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4787 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004788 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004789 } else {
4790 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004792 } else {
4793 is_type = IS_TYPE_NONE;
4794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004796 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4798 }
4799 }
4800
4801 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4802 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4803
Thierry Strudel54dc9782017-02-15 12:12:10 -08004804 //Disable tintless only if the property is set to 0
4805 memset(prop, 0, sizeof(prop));
4806 property_get("persist.camera.tintless.enable", prop, "1");
4807 int32_t tintless_value = atoi(prop);
4808
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4810 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004811
Thierry Strudel3d639192016-09-09 11:52:26 -07004812 //Disable CDS for HFR mode or if DIS/EIS is on.
4813 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4814 //after every configure_stream
4815 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4816 (m_bIsVideo)) {
4817 int32_t cds = CAM_CDS_MODE_OFF;
4818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4819 CAM_INTF_PARM_CDS_MODE, cds))
4820 LOGE("Failed to disable CDS for HFR mode");
4821
4822 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004823
4824 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4825 uint8_t* use_av_timer = NULL;
4826
4827 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004828 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004829 use_av_timer = &m_debug_avtimer;
4830 }
4831 else{
4832 use_av_timer =
4833 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004834 if (use_av_timer) {
4835 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4836 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 }
4838
4839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4840 rc = BAD_VALUE;
4841 }
4842 }
4843
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 setMobicat();
4845
4846 /* Set fps and hfr mode while sending meta stream info so that sensor
4847 * can configure appropriate streaming mode */
4848 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004849 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4850 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004851 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4852 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004853 if (rc == NO_ERROR) {
4854 int32_t max_fps =
4855 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004856 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004857 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4858 }
4859 /* For HFR, more buffers are dequeued upfront to improve the performance */
4860 if (mBatchSize) {
4861 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4862 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4863 }
4864 }
4865 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 LOGE("setHalFpsRange failed");
4867 }
4868 }
4869 if (meta.exists(ANDROID_CONTROL_MODE)) {
4870 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4871 rc = extractSceneMode(meta, metaMode, mParameters);
4872 if (rc != NO_ERROR) {
4873 LOGE("extractSceneMode failed");
4874 }
4875 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004876 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004877
Thierry Strudel04e026f2016-10-10 11:27:36 -07004878 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4879 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4880 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4881 rc = setVideoHdrMode(mParameters, vhdr);
4882 if (rc != NO_ERROR) {
4883 LOGE("setVideoHDR is failed");
4884 }
4885 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004887 if (meta.exists(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV)) {
4888 uint8_t sensorModeFullFov =
4889 meta.find(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV).data.u8[0];
4890 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4892 sensorModeFullFov)) {
4893 rc = BAD_VALUE;
4894 }
4895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 //TODO: validate the arguments, HSV scenemode should have only the
4897 //advertised fps ranges
4898
4899 /*set the capture intent, hal version, tintless, stream info,
4900      *and DIS enable parameters to the backend*/
4901 LOGD("set_parms META_STREAM_INFO " );
4902 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004903 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4904 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004905 mStreamConfigInfo.type[i],
4906 mStreamConfigInfo.stream_sizes[i].width,
4907 mStreamConfigInfo.stream_sizes[i].height,
4908 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004909 mStreamConfigInfo.format[i],
4910 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4914 mParameters);
4915 if (rc < 0) {
4916 LOGE("set_parms failed for hal version, stream info");
4917 }
4918
Chien-Yu Chenee335912017-02-09 17:53:20 -08004919 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4920 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004921 if (rc != NO_ERROR) {
4922 LOGE("Failed to get sensor output size");
4923 pthread_mutex_unlock(&mMutex);
4924 goto error_exit;
4925 }
4926
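        // Configure the crop region mapper with the full active pixel array and
        // the active array size of the selected sensor mode, so that crop regions
        // can be translated between the two coordinate spaces.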
4927 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4928 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004929 mSensorModeInfo.active_array_size.width,
4930 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004931
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004932 {
4933 Mutex::Autolock l(gHdrPlusClientLock);
4934 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004935 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004936 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4937 if (rc != OK) {
4938 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4939 mCameraId, mSensorModeInfo.op_pixel_clk);
4940 pthread_mutex_unlock(&mMutex);
4941 goto error_exit;
4942 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004943 }
4944 }
4945
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 /* Set batchmode before initializing channel. Since registerBuffer
4947 * internally initializes some of the channels, better set batchmode
4948 * even before first register buffer */
4949 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4950 it != mStreamInfo.end(); it++) {
4951 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4952 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4953 && mBatchSize) {
4954 rc = channel->setBatchSize(mBatchSize);
4955 //Disable per frame map unmap for HFR/batchmode case
4956 rc |= channel->setPerFrameMapUnmap(false);
4957 if (NO_ERROR != rc) {
4958 LOGE("Channel init failed %d", rc);
4959 pthread_mutex_unlock(&mMutex);
4960 goto error_exit;
4961 }
4962 }
4963 }
4964
4965 //First initialize all streams
4966 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4967 it != mStreamInfo.end(); it++) {
4968 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4969 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4970 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004971 setEis) {
4972 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4973 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4974 is_type = mStreamConfigInfo.is_type[i];
4975 break;
4976 }
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004979 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 rc = channel->initialize(IS_TYPE_NONE);
4981 }
4982 if (NO_ERROR != rc) {
4983 LOGE("Channel initialization failed %d", rc);
4984 pthread_mutex_unlock(&mMutex);
4985 goto error_exit;
4986 }
4987 }
4988
4989 if (mRawDumpChannel) {
4990 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4991 if (rc != NO_ERROR) {
4992 LOGE("Error: Raw Dump Channel init failed");
4993 pthread_mutex_unlock(&mMutex);
4994 goto error_exit;
4995 }
4996 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004997 if (mHdrPlusRawSrcChannel) {
4998 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4999 if (rc != NO_ERROR) {
5000 LOGE("Error: HDR+ RAW Source Channel init failed");
5001 pthread_mutex_unlock(&mMutex);
5002 goto error_exit;
5003 }
5004 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005005 if (mSupportChannel) {
5006 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5007 if (rc < 0) {
5008 LOGE("Support channel initialization failed");
5009 pthread_mutex_unlock(&mMutex);
5010 goto error_exit;
5011 }
5012 }
5013 if (mAnalysisChannel) {
5014 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5015 if (rc < 0) {
5016 LOGE("Analysis channel initialization failed");
5017 pthread_mutex_unlock(&mMutex);
5018 goto error_exit;
5019 }
5020 }
5021 if (mDummyBatchChannel) {
5022 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5023 if (rc < 0) {
5024 LOGE("mDummyBatchChannel setBatchSize failed");
5025 pthread_mutex_unlock(&mMutex);
5026 goto error_exit;
5027 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005028 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 if (rc < 0) {
5030 LOGE("mDummyBatchChannel initialization failed");
5031 pthread_mutex_unlock(&mMutex);
5032 goto error_exit;
5033 }
5034 }
5035
5036 // Set bundle info
5037 rc = setBundleInfo();
5038 if (rc < 0) {
5039 LOGE("setBundleInfo failed %d", rc);
5040 pthread_mutex_unlock(&mMutex);
5041 goto error_exit;
5042 }
5043
5044 //update settings from app here
5045 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5046 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5047 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5048 }
5049 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5050 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5051 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5052 }
5053 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5054 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5055 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5056
5057 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5058 (mLinkedCameraId != mCameraId) ) {
5059 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5060 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005061 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005062 goto error_exit;
5063 }
5064 }
5065
5066 // add bundle related cameras
5067 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5068 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005069 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5070 &m_pDualCamCmdPtr->bundle_info;
5071 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 if (mIsDeviceLinked)
5073 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5074 else
5075 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5076
5077 pthread_mutex_lock(&gCamLock);
5078
5079 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5080 LOGE("Dualcam: Invalid Session Id ");
5081 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005082 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005083 goto error_exit;
5084 }
5085
5086 if (mIsMainCamera == 1) {
5087 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5088 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005089 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005090 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005091 // related session id should be session id of linked session
5092 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5093 } else {
5094 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5095 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005096 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005097 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005098 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5099 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005100 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005101 pthread_mutex_unlock(&gCamLock);
5102
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005103 rc = mCameraHandle->ops->set_dual_cam_cmd(
5104 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005105 if (rc < 0) {
5106 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005107 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005108 goto error_exit;
5109 }
5110 }
5111
5112 //Then start them.
5113 LOGH("Start META Channel");
5114 rc = mMetadataChannel->start();
5115 if (rc < 0) {
5116 LOGE("META channel start failed");
5117 pthread_mutex_unlock(&mMutex);
5118 goto error_exit;
5119 }
5120
5121 if (mAnalysisChannel) {
5122 rc = mAnalysisChannel->start();
5123 if (rc < 0) {
5124 LOGE("Analysis channel start failed");
5125 mMetadataChannel->stop();
5126 pthread_mutex_unlock(&mMutex);
5127 goto error_exit;
5128 }
5129 }
5130
5131 if (mSupportChannel) {
5132 rc = mSupportChannel->start();
5133 if (rc < 0) {
5134 LOGE("Support channel start failed");
5135 mMetadataChannel->stop();
5136 /* Although support and analysis are mutually exclusive today
5137                    adding it in any case for future proofing */
5138 if (mAnalysisChannel) {
5139 mAnalysisChannel->stop();
5140 }
5141 pthread_mutex_unlock(&mMutex);
5142 goto error_exit;
5143 }
5144 }
5145 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5146 it != mStreamInfo.end(); it++) {
5147 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5148 LOGH("Start Processing Channel mask=%d",
5149 channel->getStreamTypeMask());
5150 rc = channel->start();
5151 if (rc < 0) {
5152 LOGE("channel start failed");
5153 pthread_mutex_unlock(&mMutex);
5154 goto error_exit;
5155 }
5156 }
5157
5158 if (mRawDumpChannel) {
5159 LOGD("Starting raw dump stream");
5160 rc = mRawDumpChannel->start();
5161 if (rc != NO_ERROR) {
5162 LOGE("Error Starting Raw Dump Channel");
5163 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5164 it != mStreamInfo.end(); it++) {
5165 QCamera3Channel *channel =
5166 (QCamera3Channel *)(*it)->stream->priv;
5167 LOGH("Stopping Processing Channel mask=%d",
5168 channel->getStreamTypeMask());
5169 channel->stop();
5170 }
5171 if (mSupportChannel)
5172 mSupportChannel->stop();
5173 if (mAnalysisChannel) {
5174 mAnalysisChannel->stop();
5175 }
5176 mMetadataChannel->stop();
5177 pthread_mutex_unlock(&mMutex);
5178 goto error_exit;
5179 }
5180 }
5181
5182 if (mChannelHandle) {
5183
5184 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5185 mChannelHandle);
5186 if (rc != NO_ERROR) {
5187 LOGE("start_channel failed %d", rc);
5188 pthread_mutex_unlock(&mMutex);
5189 goto error_exit;
5190 }
5191 }
5192
5193 goto no_error;
5194error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005195 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 return rc;
5197no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 mWokenUpByDaemon = false;
5199 mPendingLiveRequest = 0;
5200 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 }
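    // End of first-request handling (mState == CONFIGURED): meta stream info has
    // been sent to the backend and all channels have been started.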
5202
Chien-Yu Chenee335912017-02-09 17:53:20 -08005203 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005204 {
5205 Mutex::Autolock l(gHdrPlusClientLock);
5206 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5207 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5208 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5209 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5210 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5211 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005212 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005213 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005214 pthread_mutex_unlock(&mMutex);
5215 return rc;
5216 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005217
5218 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005219 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005220 }
5221
Thierry Strudel3d639192016-09-09 11:52:26 -07005222 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005223 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224
5225 if (mFlushPerf) {
5226 //we cannot accept any requests during flush
5227 LOGE("process_capture_request cannot proceed during flush");
5228 pthread_mutex_unlock(&mMutex);
5229 return NO_ERROR; //should return an error
5230 }
5231
5232 if (meta.exists(ANDROID_REQUEST_ID)) {
5233 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5234 mCurrentRequestId = request_id;
5235 LOGD("Received request with id: %d", request_id);
5236 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5237 LOGE("Unable to find request id field, \
5238 & no previous id available");
5239 pthread_mutex_unlock(&mMutex);
5240 return NAME_NOT_FOUND;
5241 } else {
5242 LOGD("Re-using old request id");
5243 request_id = mCurrentRequestId;
5244 }
5245
5246 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5247 request->num_output_buffers,
5248 request->input_buffer,
5249 frameNumber);
5250 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005251 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005252 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005253 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005254 uint32_t snapshotStreamId = 0;
5255 for (size_t i = 0; i < request->num_output_buffers; i++) {
5256 const camera3_stream_buffer_t& output = request->output_buffers[i];
5257 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5258
Emilian Peev7650c122017-01-19 08:24:33 -08005259 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5260 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005261 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 blob_request = 1;
5263 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5264 }
5265
5266 if (output.acquire_fence != -1) {
5267 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5268 close(output.acquire_fence);
5269 if (rc != OK) {
5270 LOGE("sync wait failed %d", rc);
5271 pthread_mutex_unlock(&mMutex);
5272 return rc;
5273 }
5274 }
5275
Emilian Peev0f3c3162017-03-15 12:57:46 +00005276 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5277 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005278 depthRequestPresent = true;
5279 continue;
5280 }
5281
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005282 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005283 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005284
5285 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5286 isVidBufRequested = true;
5287 }
5288 }
5289
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005290    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5291 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5292 itr++) {
5293 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5294 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5295 channel->getStreamID(channel->getStreamTypeMask());
5296
5297 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5298 isVidBufRequested = true;
5299 }
5300 }
5301
Thierry Strudel3d639192016-09-09 11:52:26 -07005302 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005303 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005304 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 }
5306 if (blob_request && mRawDumpChannel) {
5307 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005308 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005309 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005310 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 }
5312
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005313 {
5314 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5315 // Request a RAW buffer if
5316 // 1. mHdrPlusRawSrcChannel is valid.
5317 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5318 // 3. There is no pending HDR+ request.
5319 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5320 mHdrPlusPendingRequests.size() == 0) {
5321 streamsArray.stream_request[streamsArray.num_streams].streamID =
5322 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5323 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5324 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005325 }
5326
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005327 //extract capture intent
5328 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5329 mCaptureIntent =
5330 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5331 }
5332
5333 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5334 mCacMode =
5335 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5336 }
5337
5338 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005339 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005340
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005341 {
5342 Mutex::Autolock l(gHdrPlusClientLock);
5343 // If this request has a still capture intent, try to submit an HDR+ request.
5344 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5345 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5346 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5347 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005348 }
5349
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005350 if (hdrPlusRequest) {
5351 // For an HDR+ request, just set the frame parameters.
5352 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5353 if (rc < 0) {
5354 LOGE("fail to set frame parameters");
5355 pthread_mutex_unlock(&mMutex);
5356 return rc;
5357 }
5358 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005359 /* Parse the settings:
5360 * - For every request in NORMAL MODE
5361 * - For every request in HFR mode during preview only case
5362 * - For first request of every batch in HFR mode during video
5363 * recording. In batchmode the same settings except frame number is
5364 * repeated in each request of the batch.
5365 */
5366 if (!mBatchSize ||
5367 (mBatchSize && !isVidBufRequested) ||
5368 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005369 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 if (rc < 0) {
5371 LOGE("fail to set frame parameters");
5372 pthread_mutex_unlock(&mMutex);
5373 return rc;
5374 }
5375 }
5376 /* For batchMode HFR, setFrameParameters is not called for every
5377 * request; only the frame number of the latest request is parsed.
5378 * Keep track of the first and last frame numbers in a batch so that
5379 * metadata for the frame numbers of the batch can be duplicated in
5380 * handleBatchMetadata */
5381 if (mBatchSize) {
5382 if (!mToBeQueuedVidBufs) {
5383 //start of the batch
5384 mFirstFrameNumberInBatch = request->frame_number;
5385 }
5386 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5387 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5388 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005389 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005390 return BAD_VALUE;
5391 }
5392 }
5393 if (mNeedSensorRestart) {
5394 /* Unlock the mutex as restartSensor waits on the channels to be
5395 * stopped, which in turn calls stream callback functions -
5396 * handleBufferWithLock and handleMetadataWithLock */
5397 pthread_mutex_unlock(&mMutex);
5398 rc = dynamicUpdateMetaStreamInfo();
5399 if (rc != NO_ERROR) {
5400 LOGE("Restarting the sensor failed");
5401 return BAD_VALUE;
5402 }
5403 mNeedSensorRestart = false;
5404 pthread_mutex_lock(&mMutex);
5405 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005406 if(mResetInstantAEC) {
5407 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5408 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5409 mResetInstantAEC = false;
5410 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005411 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 if (request->input_buffer->acquire_fence != -1) {
5413 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5414 close(request->input_buffer->acquire_fence);
5415 if (rc != OK) {
5416 LOGE("input buffer sync wait failed %d", rc);
5417 pthread_mutex_unlock(&mMutex);
5418 return rc;
5419 }
5420 }
5421 }
5422
5423 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5424 mLastCustIntentFrmNum = frameNumber;
5425 }
5426 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005427 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005428 pendingRequestIterator latestRequest;
5429 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005430 pendingRequest.num_buffers = depthRequestPresent ?
5431 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005432 pendingRequest.request_id = request_id;
5433 pendingRequest.blob_request = blob_request;
5434 pendingRequest.timestamp = 0;
5435 pendingRequest.bUrgentReceived = 0;
5436 if (request->input_buffer) {
5437 pendingRequest.input_buffer =
5438 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5439 *(pendingRequest.input_buffer) = *(request->input_buffer);
5440 pInputBuffer = pendingRequest.input_buffer;
5441 } else {
5442 pendingRequest.input_buffer = NULL;
5443 pInputBuffer = NULL;
5444 }
5445
5446 pendingRequest.pipeline_depth = 0;
5447 pendingRequest.partial_result_cnt = 0;
5448 extractJpegMetadata(mCurJpegMeta, request);
5449 pendingRequest.jpegMetadata = mCurJpegMeta;
5450 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5451 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005453 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5454 mHybridAeEnable =
5455 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5456 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005457
5458 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5459 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005460 /* DevCamDebug metadata processCaptureRequest */
5461 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5462 mDevCamDebugMetaEnable =
5463 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5464 }
5465 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5466 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005467
5468 //extract CAC info
5469 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5470 mCacMode =
5471 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5472 }
5473 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005474 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005475
5476 PendingBuffersInRequest bufsForCurRequest;
5477 bufsForCurRequest.frame_number = frameNumber;
5478 // Mark current timestamp for the new request
5479 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005480 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005481
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005482 if (hdrPlusRequest) {
5483 // Save settings for this request.
5484 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5485 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5486
5487 // Add to pending HDR+ request queue.
5488 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5489 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5490
5491 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5492 }
5493
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005495 if ((request->output_buffers[i].stream->data_space ==
5496 HAL_DATASPACE_DEPTH) &&
5497 (HAL_PIXEL_FORMAT_BLOB ==
5498 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005499 continue;
5500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005501 RequestedBufferInfo requestedBuf;
5502 memset(&requestedBuf, 0, sizeof(requestedBuf));
5503 requestedBuf.stream = request->output_buffers[i].stream;
5504 requestedBuf.buffer = NULL;
5505 pendingRequest.buffers.push_back(requestedBuf);
5506
5507 // Add to buffer handle the pending buffers list
5508 PendingBufferInfo bufferInfo;
5509 bufferInfo.buffer = request->output_buffers[i].buffer;
5510 bufferInfo.stream = request->output_buffers[i].stream;
5511 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5512 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5513 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5514 frameNumber, bufferInfo.buffer,
5515 channel->getStreamTypeMask(), bufferInfo.stream->format);
5516 }
5517 // Add this request packet into mPendingBuffersMap
5518 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5519 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5520 mPendingBuffersMap.get_num_overall_buffers());
5521
5522 latestRequest = mPendingRequestsList.insert(
5523 mPendingRequestsList.end(), pendingRequest);
5524 if(mFlush) {
5525 LOGI("mFlush is true");
5526 pthread_mutex_unlock(&mMutex);
5527 return NO_ERROR;
5528 }
5529
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005530 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5531 // channel.
5532 if (!hdrPlusRequest) {
5533 int indexUsed;
5534 // Notify metadata channel we receive a request
5535 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005536
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005537 if(request->input_buffer != NULL){
5538 LOGD("Input request, frame_number %d", frameNumber);
5539 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5540 if (NO_ERROR != rc) {
5541 LOGE("fail to set reproc parameters");
5542 pthread_mutex_unlock(&mMutex);
5543 return rc;
5544 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 }
5546
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005547 // Call request on other streams
5548 uint32_t streams_need_metadata = 0;
5549 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5550 for (size_t i = 0; i < request->num_output_buffers; i++) {
5551 const camera3_stream_buffer_t& output = request->output_buffers[i];
5552 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5553
5554 if (channel == NULL) {
5555 LOGW("invalid channel pointer for stream");
5556 continue;
5557 }
5558
5559 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5560 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5561 output.buffer, request->input_buffer, frameNumber);
5562 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005564 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5565 if (rc < 0) {
5566 LOGE("Fail to request on picture channel");
5567 pthread_mutex_unlock(&mMutex);
5568 return rc;
5569 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005570 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005571 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5572 assert(NULL != mDepthChannel);
5573 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574
Emilian Peev7650c122017-01-19 08:24:33 -08005575 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5576 if (rc < 0) {
5577 LOGE("Fail to map on depth buffer");
5578 pthread_mutex_unlock(&mMutex);
5579 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005580 }
Emilian Peev7650c122017-01-19 08:24:33 -08005581 } else {
5582 LOGD("snapshot request with buffer %p, frame_number %d",
5583 output.buffer, frameNumber);
5584 if (!request->settings) {
5585 rc = channel->request(output.buffer, frameNumber,
5586 NULL, mPrevParameters, indexUsed);
5587 } else {
5588 rc = channel->request(output.buffer, frameNumber,
5589 NULL, mParameters, indexUsed);
5590 }
5591 if (rc < 0) {
5592 LOGE("Fail to request on picture channel");
5593 pthread_mutex_unlock(&mMutex);
5594 return rc;
5595 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005596
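                    // Record which buffer index the backend should consume for this
                    // stream: free-running in constrained high-speed mode, otherwise
                    // the index returned by the channel request above.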
Emilian Peev7650c122017-01-19 08:24:33 -08005597 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5598 uint32_t j = 0;
5599 for (j = 0; j < streamsArray.num_streams; j++) {
5600 if (streamsArray.stream_request[j].streamID == streamId) {
5601 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5602 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5603 else
5604 streamsArray.stream_request[j].buf_index = indexUsed;
5605 break;
5606 }
5607 }
5608 if (j == streamsArray.num_streams) {
5609 LOGE("Did not find matching stream to update index");
5610 assert(0);
5611 }
5612
5613 pendingBufferIter->need_metadata = true;
5614 streams_need_metadata++;
5615 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005616 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005617 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5618 bool needMetadata = false;
5619 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5620 rc = yuvChannel->request(output.buffer, frameNumber,
5621 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5622 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005623 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005624 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005625 pthread_mutex_unlock(&mMutex);
5626 return rc;
5627 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005628
5629 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5630 uint32_t j = 0;
5631 for (j = 0; j < streamsArray.num_streams; j++) {
5632 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005633 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5634 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5635 else
5636 streamsArray.stream_request[j].buf_index = indexUsed;
5637 break;
5638 }
5639 }
5640 if (j == streamsArray.num_streams) {
5641 LOGE("Did not find matching stream to update index");
5642 assert(0);
5643 }
5644
5645 pendingBufferIter->need_metadata = needMetadata;
5646 if (needMetadata)
5647 streams_need_metadata += 1;
5648 LOGD("calling YUV channel request, need_metadata is %d",
5649 needMetadata);
5650 } else {
5651 LOGD("request with buffer %p, frame_number %d",
5652 output.buffer, frameNumber);
5653
5654 rc = channel->request(output.buffer, frameNumber, indexUsed);
5655
5656 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5657 uint32_t j = 0;
5658 for (j = 0; j < streamsArray.num_streams; j++) {
5659 if (streamsArray.stream_request[j].streamID == streamId) {
5660 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5661 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5662 else
5663 streamsArray.stream_request[j].buf_index = indexUsed;
5664 break;
5665 }
5666 }
5667 if (j == streamsArray.num_streams) {
5668 LOGE("Did not find matching stream to update index");
5669 assert(0);
5670 }
5671
5672 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5673 && mBatchSize) {
5674 mToBeQueuedVidBufs++;
5675 if (mToBeQueuedVidBufs == mBatchSize) {
5676 channel->queueBatchBuf();
5677 }
5678 }
5679 if (rc < 0) {
5680 LOGE("request failed");
5681 pthread_mutex_unlock(&mMutex);
5682 return rc;
5683 }
5684 }
5685 pendingBufferIter++;
5686 }
5687
5688 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5689 itr++) {
5690 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5691
5692 if (channel == NULL) {
5693 LOGE("invalid channel pointer for stream");
5694 assert(0);
5695 return BAD_VALUE;
5696 }
5697
5698 InternalRequest requestedStream;
5699 requestedStream = (*itr);
5700
5701
5702 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5703 LOGD("snapshot request internally input buffer %p, frame_number %d",
5704 request->input_buffer, frameNumber);
5705 if(request->input_buffer != NULL){
5706 rc = channel->request(NULL, frameNumber,
5707 pInputBuffer, &mReprocMeta, indexUsed, true,
5708 requestedStream.meteringOnly);
5709 if (rc < 0) {
5710 LOGE("Fail to request on picture channel");
5711 pthread_mutex_unlock(&mMutex);
5712 return rc;
5713 }
5714 } else {
5715 LOGD("snapshot request with frame_number %d", frameNumber);
5716 if (!request->settings) {
5717 rc = channel->request(NULL, frameNumber,
5718 NULL, mPrevParameters, indexUsed, true,
5719 requestedStream.meteringOnly);
5720 } else {
5721 rc = channel->request(NULL, frameNumber,
5722 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5723 }
5724 if (rc < 0) {
5725 LOGE("Fail to request on picture channel");
5726 pthread_mutex_unlock(&mMutex);
5727 return rc;
5728 }
5729
5730 if ((*itr).meteringOnly != 1) {
5731 requestedStream.need_metadata = 1;
5732 streams_need_metadata++;
5733 }
5734 }
5735
5736 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5737 uint32_t j = 0;
5738 for (j = 0; j < streamsArray.num_streams; j++) {
5739 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005740 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5741 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5742 else
5743 streamsArray.stream_request[j].buf_index = indexUsed;
5744 break;
5745 }
5746 }
5747 if (j == streamsArray.num_streams) {
5748 LOGE("Did not find matching stream to update index");
5749 assert(0);
5750 }
5751
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005752 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005754 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005755 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005756 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005757 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005758 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005759
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005760 //If two streams have need_metadata set to true, fail the request, unless
5761 //we copy/reference-count the metadata buffer
5762 if (streams_need_metadata > 1) {
5763 LOGE("not supporting request in which two streams requires"
5764 " 2 HAL metadata for reprocessing");
5765 pthread_mutex_unlock(&mMutex);
5766 return -EINVAL;
5767 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005768
Emilian Peev7650c122017-01-19 08:24:33 -08005769 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5771 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5772 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5773 pthread_mutex_unlock(&mMutex);
5774 return BAD_VALUE;
5775 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005776 if (request->input_buffer == NULL) {
5777 /* Set the parameters to backend:
5778 * - For every request in NORMAL MODE
5779 * - For every request in HFR mode during preview only case
5780 * - Once every batch in HFR mode during video recording
5781 */
5782 if (!mBatchSize ||
5783 (mBatchSize && !isVidBufRequested) ||
5784 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5785 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5786 mBatchSize, isVidBufRequested,
5787 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005788
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005789 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
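                // The batch is complete: merge this request's stream IDs into
                // mBatchedStreamsArray (skipping duplicates) so that a single
                // set_parms call below covers every stream touched by the batch.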
5790 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5791 uint32_t m = 0;
5792 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5793 if (streamsArray.stream_request[k].streamID ==
5794 mBatchedStreamsArray.stream_request[m].streamID)
5795 break;
5796 }
5797 if (m == mBatchedStreamsArray.num_streams) {
5798 mBatchedStreamsArray.stream_request\
5799 [mBatchedStreamsArray.num_streams].streamID =
5800 streamsArray.stream_request[k].streamID;
5801 mBatchedStreamsArray.stream_request\
5802 [mBatchedStreamsArray.num_streams].buf_index =
5803 streamsArray.stream_request[k].buf_index;
5804 mBatchedStreamsArray.num_streams =
5805 mBatchedStreamsArray.num_streams + 1;
5806 }
5807 }
5808 streamsArray = mBatchedStreamsArray;
5809 }
5810 /* Update stream id of all the requested buffers */
5811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5812 streamsArray)) {
5813 LOGE("Failed to set stream type mask in the parameters");
5814 return BAD_VALUE;
5815 }
5816
5817 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5818 mParameters);
5819 if (rc < 0) {
5820 LOGE("set_parms failed");
5821 }
5822 /* reset to zero because the batch is queued */
5823 mToBeQueuedVidBufs = 0;
5824 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5825 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5826 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
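                // Batch not yet complete: just accumulate this request's stream IDs
                // into mBatchedStreamsArray; set_parms is deferred until the batch
                // fills up.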
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005827 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5828 uint32_t m = 0;
5829 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5830 if (streamsArray.stream_request[k].streamID ==
5831 mBatchedStreamsArray.stream_request[m].streamID)
5832 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005833 }
5834 if (m == mBatchedStreamsArray.num_streams) {
5835 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5836 streamID = streamsArray.stream_request[k].streamID;
5837 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5838 buf_index = streamsArray.stream_request[k].buf_index;
5839 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5840 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005841 }
5842 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005843 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005844 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005845 }
5846
5847 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5848
5849 mState = STARTED;
5850 // Added a timed condition wait
5851 struct timespec ts;
5852 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005853 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005854 if (rc < 0) {
5855 isValidTimeout = 0;
5856 LOGE("Error reading the real time clock!!");
5857 }
5858 else {
5859 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005860 int64_t timeout = 5;
5861 {
5862 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5863 // If there is a pending HDR+ request, the following requests may be blocked until the
5864 // HDR+ request is done. So allow a longer timeout.
5865 if (mHdrPlusPendingRequests.size() > 0) {
5866 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5867 }
5868 }
5869 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005870 }
5871 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005872 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005873 (mState != ERROR) && (mState != DEINIT)) {
5874 if (!isValidTimeout) {
5875 LOGD("Blocking on conditional wait");
5876 pthread_cond_wait(&mRequestCond, &mMutex);
5877 }
5878 else {
5879 LOGD("Blocking on timed conditional wait");
5880 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5881 if (rc == ETIMEDOUT) {
5882 rc = -ENODEV;
5883 LOGE("Unblocked on timeout!!!!");
5884 break;
5885 }
5886 }
5887 LOGD("Unblocked");
5888 if (mWokenUpByDaemon) {
5889 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005890 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005891 break;
5892 }
5893 }
5894 pthread_mutex_unlock(&mMutex);
5895
5896 return rc;
5897}
5898
5899/*===========================================================================
5900 * FUNCTION : dump
5901 *
5902 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and the
5903 *              pending frame drop list) to the given file descriptor
5904 * PARAMETERS :
5905 *   @fd : file descriptor to dump the state into
5906 *
5907 * RETURN : None
5908 *==========================================================================*/
5909void QCamera3HardwareInterface::dump(int fd)
5910{
5911 pthread_mutex_lock(&mMutex);
5912 dprintf(fd, "\n Camera HAL3 information Begin \n");
5913
5914 dprintf(fd, "\nNumber of pending requests: %zu \n",
5915 mPendingRequestsList.size());
5916 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5917 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5918 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5919 for(pendingRequestIterator i = mPendingRequestsList.begin();
5920 i != mPendingRequestsList.end(); i++) {
5921 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5922 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5923 i->input_buffer);
5924 }
5925 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5926 mPendingBuffersMap.get_num_overall_buffers());
5927 dprintf(fd, "-------+------------------\n");
5928 dprintf(fd, " Frame | Stream type mask \n");
5929 dprintf(fd, "-------+------------------\n");
5930 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5931 for(auto &j : req.mPendingBufferList) {
5932 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5933 dprintf(fd, " %5d | %11d \n",
5934 req.frame_number, channel->getStreamTypeMask());
5935 }
5936 }
5937 dprintf(fd, "-------+------------------\n");
5938
5939 dprintf(fd, "\nPending frame drop list: %zu\n",
5940 mPendingFrameDropList.size());
5941 dprintf(fd, "-------+-----------\n");
5942 dprintf(fd, " Frame | Stream ID \n");
5943 dprintf(fd, "-------+-----------\n");
5944 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5945 i != mPendingFrameDropList.end(); i++) {
5946 dprintf(fd, " %5d | %9d \n",
5947 i->frame_number, i->stream_ID);
5948 }
5949 dprintf(fd, "-------+-----------\n");
5950
5951 dprintf(fd, "\n Camera HAL3 information End \n");
5952
5953 /* use dumpsys media.camera as trigger to send update debug level event */
5954 mUpdateDebugLevel = true;
5955 pthread_mutex_unlock(&mMutex);
5956 return;
5957}
5958
5959/*===========================================================================
5960 * FUNCTION : flush
5961 *
5962 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5963 * conditionally restarts channels
5964 *
5965 * PARAMETERS :
5966 * @ restartChannels: re-start all channels
5967 *
5968 *
5969 * RETURN :
5970 * 0 on success
5971 * Error code on failure
5972 *==========================================================================*/
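// Call-pattern note (based on usage within this file): flush(false) is used for
// internal teardown without a restart, e.g. from handleCameraDeviceError(), while
// flush(true) stops all channels and then restarts them once pending requests
// have been errored out.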
5973int QCamera3HardwareInterface::flush(bool restartChannels)
5974{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005975 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005976 int32_t rc = NO_ERROR;
5977
5978 LOGD("Unblocking Process Capture Request");
5979 pthread_mutex_lock(&mMutex);
5980 mFlush = true;
5981 pthread_mutex_unlock(&mMutex);
5982
5983 rc = stopAllChannels();
5984 // unlink of dualcam
5985 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005986 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5987 &m_pDualCamCmdPtr->bundle_info;
5988 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005989 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5990 pthread_mutex_lock(&gCamLock);
5991
5992 if (mIsMainCamera == 1) {
5993 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5994 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005995 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005996 // related session id should be session id of linked session
5997 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5998 } else {
5999 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6000 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006001 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006002 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6003 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006004 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006005 pthread_mutex_unlock(&gCamLock);
6006
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006007 rc = mCameraHandle->ops->set_dual_cam_cmd(
6008 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006009 if (rc < 0) {
6010 LOGE("Dualcam: Unlink failed, but still proceed to close");
6011 }
6012 }
6013
6014 if (rc < 0) {
6015 LOGE("stopAllChannels failed");
6016 return rc;
6017 }
6018 if (mChannelHandle) {
6019 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6020 mChannelHandle);
6021 }
6022
6023 // Reset bundle info
6024 rc = setBundleInfo();
6025 if (rc < 0) {
6026 LOGE("setBundleInfo failed %d", rc);
6027 return rc;
6028 }
6029
6030 // Mutex Lock
6031 pthread_mutex_lock(&mMutex);
6032
6033 // Unblock process_capture_request
6034 mPendingLiveRequest = 0;
6035 pthread_cond_signal(&mRequestCond);
6036
6037 rc = notifyErrorForPendingRequests();
6038 if (rc < 0) {
6039 LOGE("notifyErrorForPendingRequests failed");
6040 pthread_mutex_unlock(&mMutex);
6041 return rc;
6042 }
6043
6044 mFlush = false;
6045
6046 // Start the Streams/Channels
6047 if (restartChannels) {
6048 rc = startAllChannels();
6049 if (rc < 0) {
6050 LOGE("startAllChannels failed");
6051 pthread_mutex_unlock(&mMutex);
6052 return rc;
6053 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006054 if (mChannelHandle) {
6055 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6056 mChannelHandle);
6057 if (rc < 0) {
6058 LOGE("start_channel failed");
6059 pthread_mutex_unlock(&mMutex);
6060 return rc;
6061 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006062 }
6063 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006064 pthread_mutex_unlock(&mMutex);
6065
6066 return 0;
6067}
6068
6069/*===========================================================================
6070 * FUNCTION : flushPerf
6071 *
6072 * DESCRIPTION: This is the performance optimization version of flush that does
6073 * not use stream off; rather, it issues a flush to the camera backend
6074 *
6075 * PARAMETERS :
6076 *
6077 *
6078 * RETURN : 0 : success
6079 * -EINVAL: input is malformed (device is not valid)
6080 * -ENODEV: if the device has encountered a serious error
6081 *==========================================================================*/
6082int QCamera3HardwareInterface::flushPerf()
6083{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006084 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 int32_t rc = 0;
6086 struct timespec timeout;
6087 bool timed_wait = false;
6088
6089 pthread_mutex_lock(&mMutex);
6090 mFlushPerf = true;
6091 mPendingBuffersMap.numPendingBufsAtFlush =
6092 mPendingBuffersMap.get_num_overall_buffers();
6093 LOGD("Calling flush. Wait for %d buffers to return",
6094 mPendingBuffersMap.numPendingBufsAtFlush);
6095
6096 /* send the flush event to the backend */
6097 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6098 if (rc < 0) {
6099 LOGE("Error in flush: IOCTL failure");
6100 mFlushPerf = false;
6101 pthread_mutex_unlock(&mMutex);
6102 return -ENODEV;
6103 }
6104
6105 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6106 LOGD("No pending buffers in HAL, return flush");
6107 mFlushPerf = false;
6108 pthread_mutex_unlock(&mMutex);
6109 return rc;
6110 }
6111
6112 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006113 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006114 if (rc < 0) {
6115 LOGE("Error reading the real time clock, cannot use timed wait");
6116 } else {
6117 timeout.tv_sec += FLUSH_TIMEOUT;
6118 timed_wait = true;
6119 }
6120
6121 //Block on conditional variable
6122 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6123 LOGD("Waiting on mBuffersCond");
6124 if (!timed_wait) {
6125 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6126 if (rc != 0) {
6127 LOGE("pthread_cond_wait failed due to rc = %s",
6128 strerror(rc));
6129 break;
6130 }
6131 } else {
6132 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6133 if (rc != 0) {
6134 LOGE("pthread_cond_timedwait failed due to rc = %s",
6135 strerror(rc));
6136 break;
6137 }
6138 }
6139 }
6140 if (rc != 0) {
6141 mFlushPerf = false;
6142 pthread_mutex_unlock(&mMutex);
6143 return -ENODEV;
6144 }
6145
6146 LOGD("Received buffers, now safe to return them");
6147
6148 //make sure the channels handle flush
6149 //currently only required for the picture channel to release snapshot resources
6150 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6151 it != mStreamInfo.end(); it++) {
6152 QCamera3Channel *channel = (*it)->channel;
6153 if (channel) {
6154 rc = channel->flush();
6155 if (rc) {
6156 LOGE("Flushing the channels failed with error %d", rc);
6157 // Even though the channel flush failed, we need to continue and
6158 // return the buffers we have to the framework; however, the return
6159 // value will be an error.
6160 rc = -ENODEV;
6161 }
6162 }
6163 }
6164
6165 /* notify the frameworks and send errored results */
6166 rc = notifyErrorForPendingRequests();
6167 if (rc < 0) {
6168 LOGE("notifyErrorForPendingRequests failed");
6169 pthread_mutex_unlock(&mMutex);
6170 return rc;
6171 }
6172
6173 //unblock process_capture_request
6174 mPendingLiveRequest = 0;
6175 unblockRequestIfNecessary();
6176
6177 mFlushPerf = false;
6178 pthread_mutex_unlock(&mMutex);
6179 LOGD ("Flush Operation complete. rc = %d", rc);
6180 return rc;
6181}
6182
6183/*===========================================================================
6184 * FUNCTION : handleCameraDeviceError
6185 *
6186 * DESCRIPTION: This function calls internal flush and notifies the error to
6187 * framework and updates the state variable.
6188 *
6189 * PARAMETERS : None
6190 *
6191 * RETURN : NO_ERROR on Success
6192 * Error code on failure
6193 *==========================================================================*/
6194int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6195{
6196 int32_t rc = NO_ERROR;
6197
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006198 {
6199 Mutex::Autolock lock(mFlushLock);
6200 pthread_mutex_lock(&mMutex);
6201 if (mState != ERROR) {
6202 //if mState != ERROR, nothing to be done
6203 pthread_mutex_unlock(&mMutex);
6204 return NO_ERROR;
6205 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006206 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006207
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006208 rc = flush(false /* restart channels */);
6209 if (NO_ERROR != rc) {
6210 LOGE("internal flush to handle mState = ERROR failed");
6211 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006212
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006213 pthread_mutex_lock(&mMutex);
6214 mState = DEINIT;
6215 pthread_mutex_unlock(&mMutex);
6216 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006217
6218 camera3_notify_msg_t notify_msg;
6219 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6220 notify_msg.type = CAMERA3_MSG_ERROR;
6221 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6222 notify_msg.message.error.error_stream = NULL;
6223 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006224 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006225
6226 return rc;
6227}
6228
6229/*===========================================================================
6230 * FUNCTION : captureResultCb
6231 *
6232 * DESCRIPTION: Callback handler for all capture result
6233 * (streams, as well as metadata)
6234 *
6235 * PARAMETERS :
6236 * @metadata_buf : metadata information
6237 * @buffer : actual gralloc buffer to be returned to frameworks.
6238 * NULL if metadata.
 * @frame_number : frame number associated with the buffer
 * @isInputBuffer : true if the callback is for an input (reprocess) buffer
6239 *
6240 * RETURN : NONE
6241 *==========================================================================*/
6242void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6243 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6244{
6245 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006246 pthread_mutex_lock(&mMutex);
6247 uint8_t batchSize = mBatchSize;
6248 pthread_mutex_unlock(&mMutex);
6249 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006250 handleBatchMetadata(metadata_buf,
6251 true /* free_and_bufdone_meta_buf */);
6252 } else { /* mBatchSize = 0 */
6253 hdrPlusPerfLock(metadata_buf);
6254 pthread_mutex_lock(&mMutex);
6255 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006256 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006257 true /* last urgent frame of batch metadata */,
6258 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006259 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006260 pthread_mutex_unlock(&mMutex);
6261 }
6262 } else if (isInputBuffer) {
6263 pthread_mutex_lock(&mMutex);
6264 handleInputBufferWithLock(frame_number);
6265 pthread_mutex_unlock(&mMutex);
6266 } else {
6267 pthread_mutex_lock(&mMutex);
6268 handleBufferWithLock(buffer, frame_number);
6269 pthread_mutex_unlock(&mMutex);
6270 }
6271 return;
6272}
6273
6274/*===========================================================================
6275 * FUNCTION : getReprocessibleOutputStreamId
6276 *
6277 * DESCRIPTION: Get source output stream id for the input reprocess stream
6278 * based on size and format, which would be the largest
6279 * output stream if an input stream exists.
6280 *
6281 * PARAMETERS :
6282 * @id : return the stream id if found
6283 *
6284 * RETURN : int32_t type of status
6285 * NO_ERROR -- success
6286 * none-zero failure code
6287 *==========================================================================*/
6288int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6289{
6290 /* check if there is any output or bidirectional stream with the same size and
6291 format, and return that stream */
6292 if ((mInputStreamInfo.dim.width > 0) &&
6293 (mInputStreamInfo.dim.height > 0)) {
6294 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6295 it != mStreamInfo.end(); it++) {
6296
6297 camera3_stream_t *stream = (*it)->stream;
6298 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6299 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6300 (stream->format == mInputStreamInfo.format)) {
6301 // Usage flag for an input stream and the source output stream
6302 // may be different.
6303 LOGD("Found reprocessible output stream! %p", *it);
6304 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6305 stream->usage, mInputStreamInfo.usage);
6306
6307 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6308 if (channel != NULL && channel->mStreams[0]) {
6309 id = channel->mStreams[0]->getMyServerID();
6310 return NO_ERROR;
6311 }
6312 }
6313 }
6314 } else {
6315 LOGD("No input stream, so no reprocessible output stream");
6316 }
6317 return NAME_NOT_FOUND;
6318}
6319
6320/*===========================================================================
6321 * FUNCTION : lookupFwkName
6322 *
6323 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6324 * make sure the parameter is correctly propagated
6325 *
6326 * PARAMETERS :
6327 * @arr : map between the two enums
6328 * @len : len of the map
6329 * @hal_name : name of the hal_parm to map
6330 *
6331 * RETURN : int type of status
6332 * fwk_name -- success
6333 * NAME_NOT_FOUND -- failure
6334 *==========================================================================*/
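// Illustrative usage (sketch; SOME_MODES_MAP and halValue are hypothetical -- the
// real lookup tables are defined elsewhere in this file):
//   int fwk = lookupFwkName(SOME_MODES_MAP,
//           sizeof(SOME_MODES_MAP) / sizeof(SOME_MODES_MAP[0]), halValue);
//   if (fwk != NAME_NOT_FOUND) {
//       // safe to report the framework enum value
//   }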
6335template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6336 size_t len, halType hal_name)
6337{
6338
6339 for (size_t i = 0; i < len; i++) {
6340 if (arr[i].hal_name == hal_name) {
6341 return arr[i].fwk_name;
6342 }
6343 }
6344
6345 /* Not being able to find a matching framework type is not necessarily
6346 * an error case. This happens when mm-camera supports more attributes
6347 * than the frameworks do */
6348 LOGH("Cannot find matching framework type");
6349 return NAME_NOT_FOUND;
6350}
6351
6352/*===========================================================================
6353 * FUNCTION : lookupHalName
6354 *
6355 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6356 * make sure the parameter is correctly propagated
6357 *
6358 * PARAMETERS :
6359 * @arr : map between the two enums
6360 * @len : len of the map
6361 * @fwk_name : framework enum value to map
6362 *
6363 * RETURN : int32_t type of status
6364 * hal_name -- success
6365 * NAME_NOT_FOUND -- failure
6366 *==========================================================================*/
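// Illustrative usage (the reverse of lookupFwkName; SOME_MODES_MAP is hypothetical):
//   int hal = lookupHalName(SOME_MODES_MAP,
//           sizeof(SOME_MODES_MAP) / sizeof(SOME_MODES_MAP[0]), fwkValue);
//   if (hal == NAME_NOT_FOUND) {
//       // the framework value has no backend equivalent
//   }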
6367template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6368 size_t len, fwkType fwk_name)
6369{
6370 for (size_t i = 0; i < len; i++) {
6371 if (arr[i].fwk_name == fwk_name) {
6372 return arr[i].hal_name;
6373 }
6374 }
6375
6376 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6377 return NAME_NOT_FOUND;
6378}
6379
6380/*===========================================================================
6381 * FUNCTION : lookupProp
6382 *
6383 * DESCRIPTION: lookup a value by its name
6384 *
6385 * PARAMETERS :
6386 * @arr : map between the two enums
6387 * @len : size of the map
6388 * @name : name to be looked up
6389 *
6390 * RETURN : Value if found
6391 * CAM_CDS_MODE_MAX if not found
6392 *==========================================================================*/
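// Illustrative usage (sketch; the property key and CDS_MAP-style table are
// assumptions -- only the call shape is the point here):
//   char prop[PROPERTY_VALUE_MAX];
//   property_get("persist.camera.CDS", prop, "Auto");
//   cam_cds_mode_type_t cds =
//           lookupProp(CDS_MAP, sizeof(CDS_MAP) / sizeof(CDS_MAP[0]), prop);
//   if (cds == CAM_CDS_MODE_MAX) {
//       // unknown descriptor; fall back to a default CDS mode
//   }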
6393template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6394 size_t len, const char *name)
6395{
6396 if (name) {
6397 for (size_t i = 0; i < len; i++) {
6398 if (!strcmp(arr[i].desc, name)) {
6399 return arr[i].val;
6400 }
6401 }
6402 }
6403 return CAM_CDS_MODE_MAX;
6404}
6405
6406/*===========================================================================
6407 * FUNCTION : translateFromHalMetadata
6408 * DESCRIPTION: Translate metadata from the camera backend into the framework
 *              camera_metadata_t format
6409 *
6410 * PARAMETERS :
6411 * @metadata : metadata information from callback
6412 * @timestamp: metadata buffer timestamp
6413 * @request_id: request id
6414 * @jpegMetadata: additional jpeg metadata
 * @pipeline_depth: pipeline depth of this result
 * @capture_intent: capture intent of the originating request
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006415 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006416 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6417 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006418 * @pprocDone: whether internal offline postprocessing is done
 * @fwk_cacMode: framework chromatic aberration correction (CAC) mode
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006419 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6420 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006421 *
6422 * RETURN : camera_metadata_t*
6423 * metadata in a format specified by fwk
6424 *==========================================================================*/
6425camera_metadata_t*
6426QCamera3HardwareInterface::translateFromHalMetadata(
6427 metadata_buffer_t *metadata,
6428 nsecs_t timestamp,
6429 int32_t request_id,
6430 const CameraMetadata& jpegMetadata,
6431 uint8_t pipeline_depth,
6432 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006433 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006434 /* DevCamDebug metadata translateFromHalMetadata argument */
6435 uint8_t DevCamDebug_meta_enable,
6436 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006437 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006438 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006439 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006440{
6441 CameraMetadata camMetadata;
6442 camera_metadata_t *resultMetadata;
6443
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006444 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006445 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6446 * Timestamp is needed because it's used for shutter notify calculation.
6447 * */
6448 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6449 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006450 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006451 }
6452
Thierry Strudel3d639192016-09-09 11:52:26 -07006453 if (jpegMetadata.entryCount())
6454 camMetadata.append(jpegMetadata);
6455
6456 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6457 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6458 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6459 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006460 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006461 if (mBatchSize == 0) {
6462 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6463 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006465
Samuel Ha68ba5172016-12-15 18:41:12 -08006466 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6467 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6468 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6469 // DevCamDebug metadata translateFromHalMetadata AF
6470 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6471 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6472 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6473 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6474 }
6475 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6476 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6477 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6478 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6479 }
6480 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6481 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6482 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6483 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6484 }
6485 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6486 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6487 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6488 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6489 }
6490 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6491 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6492 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6493 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6494 }
6495 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6496 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6497 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6498 *DevCamDebug_af_monitor_pdaf_target_pos;
6499 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6500 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6501 }
6502 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6503 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6504 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6505 *DevCamDebug_af_monitor_pdaf_confidence;
6506 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6507 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6508 }
6509 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6510 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6511 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6512 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6513 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6514 }
6515 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6516 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6517 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6518 *DevCamDebug_af_monitor_tof_target_pos;
6519 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6520 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6521 }
6522 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6523 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6524 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6525 *DevCamDebug_af_monitor_tof_confidence;
6526 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6527 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6528 }
6529 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6530 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6531 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6532 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6533 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6534 }
6535 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6536 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6537 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6538 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6539 &fwk_DevCamDebug_af_monitor_type_select, 1);
6540 }
6541 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6542 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6543 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6544 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6545 &fwk_DevCamDebug_af_monitor_refocus, 1);
6546 }
6547 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6548 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6549 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6550 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6551 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6552 }
6553 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6554 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6555 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6556 *DevCamDebug_af_search_pdaf_target_pos;
6557 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6558 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6559 }
6560 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6561 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6562 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6563 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6564 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6565 }
6566 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6567 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6568 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6569 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6570 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6571 }
6572 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6573 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6574 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6575 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6576 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6577 }
6578 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6579 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6580 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6581 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6582 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6583 }
6584 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6585 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6586 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6587 *DevCamDebug_af_search_tof_target_pos;
6588 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6589 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6590 }
6591 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6592 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6593 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6594 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6595 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6596 }
6597 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6598 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6599 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6600 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6601 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6602 }
6603 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6604 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6605 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6606 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6607 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6610 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6611 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6612 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6613 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6614 }
6615 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6616 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6617 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6618 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6619 &fwk_DevCamDebug_af_search_type_select, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6622 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6623 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6624 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6625 &fwk_DevCamDebug_af_search_next_pos, 1);
6626 }
6627 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6628 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6629 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6630 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6631 &fwk_DevCamDebug_af_search_target_pos, 1);
6632 }
6633 // DevCamDebug metadata translateFromHalMetadata AEC
6634 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6635 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6636 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6637 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6638 }
6639 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6640 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6641 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6642 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6643 }
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6645 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6646 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6647 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6648 }
6649 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6650 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6651 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6652 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6653 }
6654 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6655 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6656 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6657 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6658 }
6659 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6660 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6661 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6662 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6665 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6666 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6667 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6668 }
6669 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6670 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6671 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6672 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6673 }
Samuel Ha34229982017-02-17 13:51:11 -08006674 // DevCamDebug metadata translateFromHalMetadata zzHDR
6675 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6676 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6677 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6678 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6679 }
6680 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6681 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006682 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006683 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6684 }
6685 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6686 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6687 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6688 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6689 }
6690 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6691 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006692 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006693 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6694 }
6695 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6696 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6697 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6698 *DevCamDebug_aec_hdr_sensitivity_ratio;
6699 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6700 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6701 }
6702 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6703 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6704 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6705 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6706 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6707 }
6708 // DevCamDebug metadata translateFromHalMetadata ADRC
6709 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6710 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6711 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6712 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6713 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6714 }
6715 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6716 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6717 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6718 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6719 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6720 }
6721 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6722 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6723 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6724 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6725 }
6726 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6727 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6728 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6729 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6730 }
6731 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6732 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6733 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6734 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6735 }
6736 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6737 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6738 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6739 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6740 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006741 // DevCamDebug metadata translateFromHalMetadata AWB
6742 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6743 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6744 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6745 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6746 }
6747 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6748 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6749 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6750 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6751 }
6752 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6753 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6754 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6755 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6756 }
6757 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6758 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6759 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6760 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6763 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6764 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6765 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6766 }
6767 }
6768 // atrace_end(ATRACE_TAG_ALWAYS);
6769
Thierry Strudel3d639192016-09-09 11:52:26 -07006770 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6771 int64_t fwk_frame_number = *frame_number;
6772 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6773 }
6774
6775 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6776 int32_t fps_range[2];
6777 fps_range[0] = (int32_t)float_range->min_fps;
6778 fps_range[1] = (int32_t)float_range->max_fps;
6779 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6780 fps_range, 2);
6781 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6782 fps_range[0], fps_range[1]);
6783 }
6784
6785 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6786 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6787 }
6788
6789 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
 6790 int val = lookupFwkName(SCENE_MODES_MAP,
6791 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6792 *sceneMode);
6793 if (NAME_NOT_FOUND != val) {
6794 uint8_t fwkSceneMode = (uint8_t)val;
6795 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6796 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6797 fwkSceneMode);
6798 }
6799 }
6800
6801 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6802 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6803 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6804 }
6805
6806 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6807 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6808 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6809 }
6810
6811 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6812 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6813 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6814 }
6815
6816 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6817 CAM_INTF_META_EDGE_MODE, metadata) {
6818 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6819 }
6820
6821 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6822 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6823 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6824 }
6825
6826 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6827 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6828 }
6829
6830 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6831 if (0 <= *flashState) {
6832 uint8_t fwk_flashState = (uint8_t) *flashState;
6833 if (!gCamCapability[mCameraId]->flash_available) {
6834 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6835 }
6836 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6837 }
6838 }
6839
6840 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6841 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6842 if (NAME_NOT_FOUND != val) {
6843 uint8_t fwk_flashMode = (uint8_t)val;
6844 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6845 }
6846 }
6847
6848 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6849 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6850 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6851 }
6852
6853 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6854 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6855 }
6856
6857 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6858 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6859 }
6860
6861 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6862 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6863 }
6864
6865 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6866 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6867 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6868 }
6869
6870 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6871 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6872 LOGD("fwk_videoStab = %d", fwk_videoStab);
6873 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6874 } else {
 6875 // Regardless of whether video stabilization is supported, CTS expects the EIS result
 6876 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6877 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6878 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006879 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006880 }
6881
6882 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6883 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6884 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6885 }
6886
6887 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6888 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6889 }
6890
Thierry Strudel3d639192016-09-09 11:52:26 -07006891 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6892 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006893 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006894
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006895 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6896 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006897
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006898 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006899 blackLevelAppliedPattern->cam_black_level[0],
6900 blackLevelAppliedPattern->cam_black_level[1],
6901 blackLevelAppliedPattern->cam_black_level[2],
6902 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006903 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6904 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006905
6906#ifndef USE_HAL_3_3
6907 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306908 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006909 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05306910 fwk_blackLevelInd[0] /= 16.0;
6911 fwk_blackLevelInd[1] /= 16.0;
6912 fwk_blackLevelInd[2] /= 16.0;
6913 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006914 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6915 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006916#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006917 }
6918
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006919#ifndef USE_HAL_3_3
6920 // Fixed whitelevel is used by ISP/Sensor
6921 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6922 &gCamCapability[mCameraId]->white_level, 1);
6923#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006924
6925 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6926 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6927 int32_t scalerCropRegion[4];
6928 scalerCropRegion[0] = hScalerCropRegion->left;
6929 scalerCropRegion[1] = hScalerCropRegion->top;
6930 scalerCropRegion[2] = hScalerCropRegion->width;
6931 scalerCropRegion[3] = hScalerCropRegion->height;
6932
6933 // Adjust crop region from sensor output coordinate system to active
6934 // array coordinate system.
6935 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6936 scalerCropRegion[2], scalerCropRegion[3]);
6937
6938 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6939 }
6940
6941 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6942 LOGD("sensorExpTime = %lld", *sensorExpTime);
6943 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6944 }
6945
 6946 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
 6947 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
 6948 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
 6949 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6950 }
6951
6952 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6953 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6954 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6955 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6956 sensorRollingShutterSkew, 1);
6957 }
6958
6959 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6960 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6961 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6962
6963 //calculate the noise profile based on sensitivity
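    // Per the ANDROID_SENSOR_NOISE_PROFILE model (variance ~= S * signal + O), one
    // (S, O) pair is reported per color channel; the same pair derived from the
    // analog sensitivity is replicated across all channels below.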
6964 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6965 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6966 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6967 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6968 noise_profile[i] = noise_profile_S;
6969 noise_profile[i+1] = noise_profile_O;
6970 }
6971 LOGD("noise model entry (S, O) is (%f, %f)",
6972 noise_profile_S, noise_profile_O);
6973 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6974 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6975 }
6976
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006977#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006978 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006979 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006980 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006981 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006982 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6983 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6984 }
6985 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006986#endif
6987
Thierry Strudel3d639192016-09-09 11:52:26 -07006988 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6989 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6990 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6991 }
6992
6993 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6994 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6995 *faceDetectMode);
6996 if (NAME_NOT_FOUND != val) {
6997 uint8_t fwk_faceDetectMode = (uint8_t)val;
6998 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6999
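    // With face detection enabled, translate detected faces into the active-array
    // coordinate space; landmark data is only attached in FACE_DETECT_MODE_FULL.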
7000 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7001 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7002 CAM_INTF_META_FACE_DETECTION, metadata) {
7003 uint8_t numFaces = MIN(
7004 faceDetectionInfo->num_faces_detected, MAX_ROI);
7005 int32_t faceIds[MAX_ROI];
7006 uint8_t faceScores[MAX_ROI];
7007 int32_t faceRectangles[MAX_ROI * 4];
7008 int32_t faceLandmarks[MAX_ROI * 6];
7009 size_t j = 0, k = 0;
7010
7011 for (size_t i = 0; i < numFaces; i++) {
7012 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7013 // Adjust crop region from sensor output coordinate system to active
7014 // array coordinate system.
7015 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7016 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7017 rect.width, rect.height);
7018
7019 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7020 faceRectangles+j, -1);
7021
7022 j+= 4;
7023 }
7024 if (numFaces <= 0) {
7025 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7026 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7027 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7028 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7029 }
7030
7031 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7032 numFaces);
7033 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7034 faceRectangles, numFaces * 4U);
7035 if (fwk_faceDetectMode ==
7036 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7037 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7038 CAM_INTF_META_FACE_LANDMARK, metadata) {
7039
7040 for (size_t i = 0; i < numFaces; i++) {
7041 // Map the co-ordinate sensor output coordinate system to active
7042 // array coordinate system.
7043 mCropRegionMapper.toActiveArray(
7044 landmarks->face_landmarks[i].left_eye_center.x,
7045 landmarks->face_landmarks[i].left_eye_center.y);
7046 mCropRegionMapper.toActiveArray(
7047 landmarks->face_landmarks[i].right_eye_center.x,
7048 landmarks->face_landmarks[i].right_eye_center.y);
7049 mCropRegionMapper.toActiveArray(
7050 landmarks->face_landmarks[i].mouth_center.x,
7051 landmarks->face_landmarks[i].mouth_center.y);
7052
7053 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007054 k+= TOTAL_LANDMARK_INDICES;
7055 }
7056 } else {
7057 for (size_t i = 0; i < numFaces; i++) {
7058 setInvalidLandmarks(faceLandmarks+k);
7059 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007060 }
7061 }
7062
7063 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7064 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7065 faceLandmarks, numFaces * 6U);
7066 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007067 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7068 CAM_INTF_META_FACE_BLINK, metadata) {
7069 uint8_t detected[MAX_ROI];
7070 uint8_t degree[MAX_ROI * 2];
7071 for (size_t i = 0; i < numFaces; i++) {
7072 detected[i] = blinks->blink[i].blink_detected;
7073 degree[2 * i] = blinks->blink[i].left_blink;
7074 degree[2 * i + 1] = blinks->blink[i].right_blink;
7075 }
7076 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7077 detected, numFaces);
7078 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7079 degree, numFaces * 2);
7080 }
7081 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7082 CAM_INTF_META_FACE_SMILE, metadata) {
7083 uint8_t degree[MAX_ROI];
7084 uint8_t confidence[MAX_ROI];
7085 for (size_t i = 0; i < numFaces; i++) {
7086 degree[i] = smiles->smile[i].smile_degree;
7087 confidence[i] = smiles->smile[i].smile_confidence;
7088 }
7089 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7090 degree, numFaces);
7091 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7092 confidence, numFaces);
7093 }
7094 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7095 CAM_INTF_META_FACE_GAZE, metadata) {
7096 int8_t angle[MAX_ROI];
7097 int32_t direction[MAX_ROI * 3];
7098 int8_t degree[MAX_ROI * 2];
7099 for (size_t i = 0; i < numFaces; i++) {
7100 angle[i] = gazes->gaze[i].gaze_angle;
7101 direction[3 * i] = gazes->gaze[i].updown_dir;
7102 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7103 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7104 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7105 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7106 }
7107 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7108 (uint8_t *)angle, numFaces);
7109 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7110 direction, numFaces * 3);
7111 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7112 (uint8_t *)degree, numFaces * 2);
7113 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007114 }
7115 }
7116 }
7117 }
7118
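    // Histogram statistics are exported through the QCAMERA3_/NEXUS_EXPERIMENTAL_2017_
    // vendor tags; the bin count is taken from CAM_INTF_META_STATS_HISTOGRAM_BINS.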
7119 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7120 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007121 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007122 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007123 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007124
Shuzhen Wang14415f52016-11-16 18:26:18 -08007125 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7126 histogramBins = *histBins;
7127 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7128 }
7129
7130 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007131 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7132 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007133 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134
7135 switch (stats_data->type) {
7136 case CAM_HISTOGRAM_TYPE_BAYER:
7137 switch (stats_data->bayer_stats.data_type) {
7138 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007139 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7140 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007141 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007142 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7143 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007144 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007145 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7146 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007147 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007148 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007149 case CAM_STATS_CHANNEL_R:
7150 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007151 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7152 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007153 }
7154 break;
7155 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007156 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007157 break;
7158 }
7159
Shuzhen Wang14415f52016-11-16 18:26:18 -08007160 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007161 }
7162 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007163 }
7164
7165 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7166 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7167 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7168 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7169 }
7170
7171 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7172 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7173 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7174 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7175 }
7176
7177 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7178 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7179 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7180 CAM_MAX_SHADING_MAP_HEIGHT);
7181 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7182 CAM_MAX_SHADING_MAP_WIDTH);
7183 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7184 lensShadingMap->lens_shading, 4U * map_width * map_height);
7185 }
7186
7187 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7188 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7189 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7190 }
7191
7192 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7193 //Populate CAM_INTF_META_TONEMAP_CURVES
7194 /* ch0 = G, ch 1 = B, ch 2 = R*/
7195 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7196 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7197 tonemap->tonemap_points_cnt,
7198 CAM_MAX_TONEMAP_CURVE_SIZE);
7199 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7200 }
7201
7202 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7203 &tonemap->curves[0].tonemap_points[0][0],
7204 tonemap->tonemap_points_cnt * 2);
7205
7206 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7207 &tonemap->curves[1].tonemap_points[0][0],
7208 tonemap->tonemap_points_cnt * 2);
7209
7210 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7211 &tonemap->curves[2].tonemap_points[0][0],
7212 tonemap->tonemap_points_cnt * 2);
7213 }
7214
7215 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7216 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7217 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7218 CC_GAIN_MAX);
7219 }
7220
7221 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7222 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7223 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7224 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7225 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7226 }
7227
7228 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7229 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7230 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7231 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7232 toneCurve->tonemap_points_cnt,
7233 CAM_MAX_TONEMAP_CURVE_SIZE);
7234 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7235 }
7236 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7237 (float*)toneCurve->curve.tonemap_points,
7238 toneCurve->tonemap_points_cnt * 2);
7239 }
7240
7241 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7242 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7243 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7244 predColorCorrectionGains->gains, 4);
7245 }
7246
7247 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7248 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7249 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7250 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7251 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7252 }
7253
7254 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7255 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7256 }
7257
7258 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7259 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7260 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7261 }
7262
7263 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7264 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7265 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7266 }
7267
7268 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7269 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7270 *effectMode);
7271 if (NAME_NOT_FOUND != val) {
7272 uint8_t fwk_effectMode = (uint8_t)val;
7273 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7274 }
7275 }
7276
7277 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7278 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7279 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7280 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7281 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7282 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7283 }
7284 int32_t fwk_testPatternData[4];
7285 fwk_testPatternData[0] = testPatternData->r;
7286 fwk_testPatternData[3] = testPatternData->b;
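    // Gr/Gb placement depends on the sensor's Bayer arrangement; the switch below
    // swaps them for GBRG/BGGR sensors to keep the channel ordering consistent.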
7287 switch (gCamCapability[mCameraId]->color_arrangement) {
7288 case CAM_FILTER_ARRANGEMENT_RGGB:
7289 case CAM_FILTER_ARRANGEMENT_GRBG:
7290 fwk_testPatternData[1] = testPatternData->gr;
7291 fwk_testPatternData[2] = testPatternData->gb;
7292 break;
7293 case CAM_FILTER_ARRANGEMENT_GBRG:
7294 case CAM_FILTER_ARRANGEMENT_BGGR:
7295 fwk_testPatternData[2] = testPatternData->gr;
7296 fwk_testPatternData[1] = testPatternData->gb;
7297 break;
7298 default:
7299 LOGE("color arrangement %d is not supported",
7300 gCamCapability[mCameraId]->color_arrangement);
7301 break;
7302 }
7303 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7304 }
7305
7306 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7307 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7308 }
7309
7310 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7311 String8 str((const char *)gps_methods);
7312 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7313 }
7314
7315 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7316 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7317 }
7318
7319 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7320 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7321 }
7322
7323 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7324 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7325 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7326 }
7327
7328 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7329 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7330 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7331 }
7332
7333 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7334 int32_t fwk_thumb_size[2];
7335 fwk_thumb_size[0] = thumb_size->width;
7336 fwk_thumb_size[1] = thumb_size->height;
7337 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7338 }
7339
7340 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7341 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7342 privateData,
7343 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7344 }
7345
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007346 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007347 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007348 meteringMode, 1);
7349 }
7350
Thierry Strudel54dc9782017-02-15 12:12:10 -08007351 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7352 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7353 LOGD("hdr_scene_data: %d %f\n",
7354 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7355 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7356 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7357 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7358 &isHdr, 1);
7359 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7360 &isHdrConfidence, 1);
7361 }
7362
7363
7364
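    // Tuning metadata is serialized into a flat blob: a 4-byte data version, five
    // 4-byte size fields (sensor, VFE, CPP, CAC, mod3), then the sensor, VFE, CPP
    // and CAC payloads, each clamped to its TUNING_*_DATA_MAX limit.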
Thierry Strudel3d639192016-09-09 11:52:26 -07007365 if (metadata->is_tuning_params_valid) {
7366 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7367 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7368 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7369
7370
7371 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7372 sizeof(uint32_t));
7373 data += sizeof(uint32_t);
7374
7375 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7376 sizeof(uint32_t));
7377 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7378 data += sizeof(uint32_t);
7379
7380 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7381 sizeof(uint32_t));
7382 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7383 data += sizeof(uint32_t);
7384
7385 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7386 sizeof(uint32_t));
7387 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7388 data += sizeof(uint32_t);
7389
7390 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7391 sizeof(uint32_t));
7392 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7393 data += sizeof(uint32_t);
7394
7395 metadata->tuning_params.tuning_mod3_data_size = 0;
7396 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7397 sizeof(uint32_t));
7398 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7399 data += sizeof(uint32_t);
7400
7401 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7402 TUNING_SENSOR_DATA_MAX);
7403 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7404 count);
7405 data += count;
7406
7407 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7408 TUNING_VFE_DATA_MAX);
7409 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7410 count);
7411 data += count;
7412
7413 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7414 TUNING_CPP_DATA_MAX);
7415 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7416 count);
7417 data += count;
7418
7419 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7420 TUNING_CAC_DATA_MAX);
7421 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7422 count);
7423 data += count;
7424
7425 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7426 (int32_t *)(void *)tuning_meta_data_blob,
7427 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7428 }
7429
7430 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7431 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7432 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7433 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7434 NEUTRAL_COL_POINTS);
7435 }
7436
7437 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7438 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7439 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7440 }
7441
7442 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7443 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7444 // Adjust crop region from sensor output coordinate system to active
7445 // array coordinate system.
7446 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7447 hAeRegions->rect.width, hAeRegions->rect.height);
7448
7449 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7450 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7451 REGIONS_TUPLE_COUNT);
7452 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7453 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7454 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7455 hAeRegions->rect.height);
7456 }
7457
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007458 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7459 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7460 if (NAME_NOT_FOUND != val) {
7461 uint8_t fwkAfMode = (uint8_t)val;
7462 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7463 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7464 } else {
7465 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7466 val);
7467 }
7468 }
7469
Thierry Strudel3d639192016-09-09 11:52:26 -07007470 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7471 uint8_t fwk_afState = (uint8_t) *afState;
7472 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007473 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007474 }
7475
7476 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7477 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7478 }
7479
7480 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7481 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7482 }
7483
7484 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7485 uint8_t fwk_lensState = *lensState;
7486 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7487 }
7488
Thierry Strudel3d639192016-09-09 11:52:26 -07007489
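    // The HAL's AUTO_50HZ/AUTO_60HZ antibanding sub-modes have no framework
    // equivalent, so report them as plain AUTO.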
7490 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007491 uint32_t ab_mode = *hal_ab_mode;
7492 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7493 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7494 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7495 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007496 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007497 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007498 if (NAME_NOT_FOUND != val) {
7499 uint8_t fwk_ab_mode = (uint8_t)val;
7500 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7501 }
7502 }
7503
7504 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7505 int val = lookupFwkName(SCENE_MODES_MAP,
7506 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7507 if (NAME_NOT_FOUND != val) {
7508 uint8_t fwkBestshotMode = (uint8_t)val;
7509 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7510 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7511 } else {
7512 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7513 }
7514 }
7515
7516 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7517 uint8_t fwk_mode = (uint8_t) *mode;
7518 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7519 }
7520
 7521 /* Constant metadata values to be updated */
7522 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7523 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7524
7525 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7526 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7527
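    // No per-pixel hot pixel data is available, so publish an empty (zero-length)
    // hot pixel map to the framework.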
7528 int32_t hotPixelMap[2];
7529 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7530
7531 // CDS
7532 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7533 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7534 }
7535
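    // mCurrFeatureState tracks which optional features (video HDR, IR, TNR) are
    // currently active so that state transitions can be logged for profiling.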
Thierry Strudel04e026f2016-10-10 11:27:36 -07007536 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7537 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007538 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007539 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7540 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7541 } else {
7542 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7543 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007544
7545 if(fwk_hdr != curr_hdr_state) {
7546 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7547 if(fwk_hdr)
7548 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7549 else
7550 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7551 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007552 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7553 }
7554
Thierry Strudel54dc9782017-02-15 12:12:10 -08007555 //binning correction
7556 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7557 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7558 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7559 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7560 }
7561
Thierry Strudel04e026f2016-10-10 11:27:36 -07007562 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007563 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007564 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7565 int8_t is_ir_on = 0;
7566
 7567 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7568 if(is_ir_on != curr_ir_state) {
7569 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7570 if(is_ir_on)
7571 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7572 else
7573 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7574 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007575 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007576 }
7577
Thierry Strudel269c81a2016-10-12 12:13:59 -07007578 // AEC SPEED
7579 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7580 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7581 }
7582
7583 // AWB SPEED
7584 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7585 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7586 }
7587
Thierry Strudel3d639192016-09-09 11:52:26 -07007588 // TNR
7589 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7590 uint8_t tnr_enable = tnr->denoise_enable;
7591 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007592 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7593 int8_t is_tnr_on = 0;
7594
 7595 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7596 if(is_tnr_on != curr_tnr_state) {
7597 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7598 if(is_tnr_on)
7599 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7600 else
7601 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7602 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007603
7604 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7605 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7606 }
7607
7608 // Reprocess crop data
7609 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7610 uint8_t cnt = crop_data->num_of_streams;
7611 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7612 // mm-qcamera-daemon only posts crop_data for streams
 7613 // not linked to pproc, so the absence of valid crop metadata is not
 7614 // necessarily an error case.
7615 LOGD("No valid crop metadata entries");
7616 } else {
7617 uint32_t reproc_stream_id;
7618 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7619 LOGD("No reprocessible stream found, ignore crop data");
7620 } else {
7621 int rc = NO_ERROR;
7622 Vector<int32_t> roi_map;
7623 int32_t *crop = new int32_t[cnt*4];
7624 if (NULL == crop) {
7625 rc = NO_MEMORY;
7626 }
7627 if (NO_ERROR == rc) {
7628 int32_t streams_found = 0;
7629 for (size_t i = 0; i < cnt; i++) {
7630 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7631 if (pprocDone) {
7632 // HAL already does internal reprocessing,
7633 // either via reprocessing before JPEG encoding,
7634 // or offline postprocessing for pproc bypass case.
7635 crop[0] = 0;
7636 crop[1] = 0;
7637 crop[2] = mInputStreamInfo.dim.width;
7638 crop[3] = mInputStreamInfo.dim.height;
7639 } else {
7640 crop[0] = crop_data->crop_info[i].crop.left;
7641 crop[1] = crop_data->crop_info[i].crop.top;
7642 crop[2] = crop_data->crop_info[i].crop.width;
7643 crop[3] = crop_data->crop_info[i].crop.height;
7644 }
7645 roi_map.add(crop_data->crop_info[i].roi_map.left);
7646 roi_map.add(crop_data->crop_info[i].roi_map.top);
7647 roi_map.add(crop_data->crop_info[i].roi_map.width);
7648 roi_map.add(crop_data->crop_info[i].roi_map.height);
7649 streams_found++;
7650 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7651 crop[0], crop[1], crop[2], crop[3]);
7652 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7653 crop_data->crop_info[i].roi_map.left,
7654 crop_data->crop_info[i].roi_map.top,
7655 crop_data->crop_info[i].roi_map.width,
7656 crop_data->crop_info[i].roi_map.height);
7657 break;
7658
7659 }
7660 }
7661 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7662 &streams_found, 1);
7663 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7664 crop, (size_t)(streams_found * 4));
7665 if (roi_map.array()) {
7666 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7667 roi_map.array(), roi_map.size());
7668 }
7669 }
7670 if (crop) {
7671 delete [] crop;
7672 }
7673 }
7674 }
7675 }
7676
7677 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
 7678 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
 7679 // so hardcode the CAC result to OFF mode.
7680 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7681 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7682 } else {
7683 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7684 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7685 *cacMode);
7686 if (NAME_NOT_FOUND != val) {
7687 uint8_t resultCacMode = (uint8_t)val;
7688 // check whether CAC result from CB is equal to Framework set CAC mode
7689 // If not equal then set the CAC mode came in corresponding request
7690 if (fwk_cacMode != resultCacMode) {
7691 resultCacMode = fwk_cacMode;
7692 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007693 //Check if CAC is disabled by property
7694 if (m_cacModeDisabled) {
7695 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7696 }
7697
Thierry Strudel3d639192016-09-09 11:52:26 -07007698 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7699 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7700 } else {
7701 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7702 }
7703 }
7704 }
7705
7706 // Post blob of cam_cds_data through vendor tag.
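    // Only the CDS state of the reprocessible output stream is published; the
    // override blob therefore carries a single stream entry.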
7707 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7708 uint8_t cnt = cdsInfo->num_of_streams;
7709 cam_cds_data_t cdsDataOverride;
7710 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7711 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7712 cdsDataOverride.num_of_streams = 1;
7713 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7714 uint32_t reproc_stream_id;
7715 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7716 LOGD("No reprocessible stream found, ignore cds data");
7717 } else {
7718 for (size_t i = 0; i < cnt; i++) {
7719 if (cdsInfo->cds_info[i].stream_id ==
7720 reproc_stream_id) {
7721 cdsDataOverride.cds_info[0].cds_enable =
7722 cdsInfo->cds_info[i].cds_enable;
7723 break;
7724 }
7725 }
7726 }
7727 } else {
7728 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7729 }
7730 camMetadata.update(QCAMERA3_CDS_INFO,
7731 (uint8_t *)&cdsDataOverride,
7732 sizeof(cam_cds_data_t));
7733 }
7734
7735 // Ldaf calibration data
7736 if (!mLdafCalibExist) {
7737 IF_META_AVAILABLE(uint32_t, ldafCalib,
7738 CAM_INTF_META_LDAF_EXIF, metadata) {
7739 mLdafCalibExist = true;
7740 mLdafCalib[0] = ldafCalib[0];
7741 mLdafCalib[1] = ldafCalib[1];
7742 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7743 ldafCalib[0], ldafCalib[1]);
7744 }
7745 }
7746
Thierry Strudel54dc9782017-02-15 12:12:10 -08007747 // EXIF debug data through vendor tag
7748 /*
7749 * Mobicat Mask can assume 3 values:
7750 * 1 refers to Mobicat data,
7751 * 2 refers to Stats Debug and Exif Debug Data
7752 * 3 refers to Mobicat and Stats Debug Data
7753 * We want to make sure that we are sending Exif debug data
7754 * only when Mobicat Mask is 2.
7755 */
7756 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7757 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7758 (uint8_t *)(void *)mExifParams.debug_params,
7759 sizeof(mm_jpeg_debug_exif_params_t));
7760 }
7761
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007762 // Reprocess and DDM debug data through vendor tag
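    // Snapshot crop info from each pipeline stage (sensor, CAMIF, ISP, CPP) plus
    // focal length ratio, flip, rotation, AF ROI and the dynamic feature mask are
    // aggregated into a single cam_reprocess_info_t blob below.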
7763 cam_reprocess_info_t repro_info;
7764 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007765 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7766 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007767 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007768 }
7769 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7770 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007771 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007772 }
7773 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7774 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007775 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007776 }
7777 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7778 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007779 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007780 }
7781 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7782 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007783 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007784 }
7785 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007786 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007787 }
7788 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7789 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007790 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007791 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007792 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7793 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7794 }
7795 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7796 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7797 }
7798 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7799 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007800
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007801 // INSTANT AEC MODE
7802 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7803 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7804 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7805 }
7806
Shuzhen Wange763e802016-03-31 10:24:29 -07007807 // AF scene change
7808 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7809 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7810 }
7811
Thierry Strudel3d639192016-09-09 11:52:26 -07007812 resultMetadata = camMetadata.release();
7813 return resultMetadata;
7814}
7815
7816/*===========================================================================
7817 * FUNCTION : saveExifParams
7818 *
 7819 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters from HAL metadata into mExifParams
7820 *
7821 * PARAMETERS :
7822 * @metadata : metadata information from callback
7823 *
7824 * RETURN : none
7825 *
7826 *==========================================================================*/
7827void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7828{
7829 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7830 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7831 if (mExifParams.debug_params) {
7832 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7833 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7834 }
7835 }
7836 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7837 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7838 if (mExifParams.debug_params) {
7839 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7840 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7841 }
7842 }
7843 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7844 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7845 if (mExifParams.debug_params) {
7846 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7847 mExifParams.debug_params->af_debug_params_valid = TRUE;
7848 }
7849 }
7850 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7851 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7852 if (mExifParams.debug_params) {
7853 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7854 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7855 }
7856 }
7857 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7858 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7859 if (mExifParams.debug_params) {
7860 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7861 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7862 }
7863 }
7864 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7865 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7866 if (mExifParams.debug_params) {
7867 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7868 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7869 }
7870 }
7871 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7872 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7873 if (mExifParams.debug_params) {
7874 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7875 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7876 }
7877 }
7878 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7879 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7880 if (mExifParams.debug_params) {
7881 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7882 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7883 }
7884 }
7885}
7886
7887/*===========================================================================
7888 * FUNCTION : get3AExifParams
7889 *
 7890 * DESCRIPTION: Return the EXIF parameters cached by saveExifParams()
7891 *
7892 * PARAMETERS : none
7893 *
7894 *
7895 * RETURN : mm_jpeg_exif_params_t
7896 *
7897 *==========================================================================*/
7898mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7899{
7900 return mExifParams;
7901}
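// Illustrative sketch (hypothetical caller, not part of this file): the JPEG
// post-processing path is expected to pull the cached parameters when composing
// EXIF debug data, e.g.:
//   mm_jpeg_exif_params_t exif = hal->get3AExifParams();
//   if (exif.debug_params && exif.debug_params->ae_debug_params_valid) {
//       // attach exif.debug_params->ae_debug_params to the EXIF debug blob
//   }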
7902
7903/*===========================================================================
7904 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7905 *
 7906 * DESCRIPTION: Translate urgent (partial result) metadata from the HAL into framework result metadata
7907 *
7908 * PARAMETERS :
7909 * @metadata : metadata information from callback
7910 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7911 *                  urgent metadata in a batch. Always true for
7912 *                  non-batch mode.
7913 *
7914 * RETURN : camera_metadata_t*
7915 * metadata in a format specified by fwk
7916 *==========================================================================*/
7917camera_metadata_t*
7918QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7919    (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
7920{
7921 CameraMetadata camMetadata;
7922 camera_metadata_t *resultMetadata;
7923
7924    if (!lastUrgentMetadataInBatch) {
7925 /* In batch mode, use empty metadata if this is not the last in batch
7926 */
7927 resultMetadata = allocate_camera_metadata(0, 0);
7928 return resultMetadata;
7929 }
7930
7931 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7932 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7933 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7934 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7935 }
7936
7937 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7938 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7939 &aecTrigger->trigger, 1);
7940 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7941 &aecTrigger->trigger_id, 1);
7942 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7943 aecTrigger->trigger);
7944 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7945 aecTrigger->trigger_id);
7946 }
7947
7948 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7949 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7950 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7951 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7952 }
7953
7954    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7955 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7956 &af_trigger->trigger, 1);
7957 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7958 af_trigger->trigger);
7959 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7960 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7961 af_trigger->trigger_id);
7962 }
7963
7964    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7965 /*af regions*/
7966 int32_t afRegions[REGIONS_TUPLE_COUNT];
7967 // Adjust crop region from sensor output coordinate system to active
7968 // array coordinate system.
7969 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7970 hAfRegions->rect.width, hAfRegions->rect.height);
7971
7972 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7973 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7974 REGIONS_TUPLE_COUNT);
7975 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7976 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7977 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7978 hAfRegions->rect.height);
7979 }
7980
7981    // AF region confidence
7982 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
7983 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
7984 }
7985
7986    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7987 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7988 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7989 if (NAME_NOT_FOUND != val) {
7990 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7991 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7992 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7993 } else {
7994 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7995 }
7996 }
7997
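    // Note: the AE mode reported below is deduced from three backend hints with
    // this precedence -- red-eye reduction enabled maps to ON_AUTO_FLASH_REDEYE,
    // otherwise an AUTO/ON LED flash mode is mapped through AE_FLASH_MODE_MAP,
    // otherwise the plain AE mode is used (ON/OFF, or the experimental
    // external-flash mode).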
7998 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7999 uint32_t aeMode = CAM_AE_MODE_MAX;
8000 int32_t flashMode = CAM_FLASH_MODE_MAX;
8001 int32_t redeye = -1;
8002 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8003 aeMode = *pAeMode;
8004 }
8005 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8006 flashMode = *pFlashMode;
8007 }
8008 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8009 redeye = *pRedeye;
8010 }
8011
8012 if (1 == redeye) {
8013 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8014 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8015 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8016 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8017 flashMode);
8018 if (NAME_NOT_FOUND != val) {
8019 fwk_aeMode = (uint8_t)val;
8020 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8021 } else {
8022 LOGE("Unsupported flash mode %d", flashMode);
8023 }
8024 } else if (aeMode == CAM_AE_MODE_ON) {
8025 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8026 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8027 } else if (aeMode == CAM_AE_MODE_OFF) {
8028 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8029 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8030    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8031        fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8032        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8033    } else {
8034 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8035 "flashMode:%d, aeMode:%u!!!",
8036 redeye, flashMode, aeMode);
8037 }
8038    if (mInstantAEC) {
8039        // Increment frame index count until a bound is reached for instant AEC.
8040 mInstantAecFrameIdxCount++;
8041 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8042 CAM_INTF_META_AEC_INFO, metadata) {
8043 LOGH("ae_params->settled = %d",ae_params->settled);
8044 // If AEC settled, or if number of frames reached bound value,
8045 // should reset instant AEC.
8046 if (ae_params->settled ||
8047 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8048 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8049 mInstantAEC = false;
8050 mResetInstantAEC = true;
8051 mInstantAecFrameIdxCount = 0;
8052 }
8053 }
8054 }
8055    resultMetadata = camMetadata.release();
8056 return resultMetadata;
8057}
8058
8059/*===========================================================================
8060 * FUNCTION : dumpMetadataToFile
8061 *
8062 * DESCRIPTION: Dumps tuning metadata to file system
8063 *
8064 * PARAMETERS :
8065 * @meta : tuning metadata
8066 * @dumpFrameCount : current dump frame count
8067 * @enabled : Enable mask
8068 *
8069 *==========================================================================*/
8070void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8071 uint32_t &dumpFrameCount,
8072 bool enabled,
8073 const char *type,
8074 uint32_t frameNumber)
8075{
8076 //Some sanity checks
8077 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8078 LOGE("Tuning sensor data size bigger than expected %d: %d",
8079 meta.tuning_sensor_data_size,
8080 TUNING_SENSOR_DATA_MAX);
8081 return;
8082 }
8083
8084 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8085 LOGE("Tuning VFE data size bigger than expected %d: %d",
8086 meta.tuning_vfe_data_size,
8087 TUNING_VFE_DATA_MAX);
8088 return;
8089 }
8090
8091 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8092 LOGE("Tuning CPP data size bigger than expected %d: %d",
8093 meta.tuning_cpp_data_size,
8094 TUNING_CPP_DATA_MAX);
8095 return;
8096 }
8097
8098 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8099 LOGE("Tuning CAC data size bigger than expected %d: %d",
8100 meta.tuning_cac_data_size,
8101 TUNING_CAC_DATA_MAX);
8102 return;
8103 }
8104 //
8105
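    // The dump written below is laid out as: uint32 tuning_data_version, then the
    // uint32 sensor/VFE/CPP/CAC/mod3 data sizes (mod3 is always written as 0),
    // followed by the sensor, VFE, CPP and CAC payloads copied from meta.data at
    // their respective TUNING_*_DATA_OFFSETs. This reflects the write sequence in
    // this function, not an external format specification.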
8106 if(enabled){
8107 char timeBuf[FILENAME_MAX];
8108 char buf[FILENAME_MAX];
8109 memset(buf, 0, sizeof(buf));
8110 memset(timeBuf, 0, sizeof(timeBuf));
8111 time_t current_time;
8112 struct tm * timeinfo;
8113 time (&current_time);
8114 timeinfo = localtime (&current_time);
8115 if (timeinfo != NULL) {
8116 strftime (timeBuf, sizeof(timeBuf),
8117 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8118 }
8119 String8 filePath(timeBuf);
8120 snprintf(buf,
8121 sizeof(buf),
8122 "%dm_%s_%d.bin",
8123 dumpFrameCount,
8124 type,
8125 frameNumber);
8126 filePath.append(buf);
8127 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8128 if (file_fd >= 0) {
8129 ssize_t written_len = 0;
8130 meta.tuning_data_version = TUNING_DATA_VERSION;
8131 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8132 written_len += write(file_fd, data, sizeof(uint32_t));
8133 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8134 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8135 written_len += write(file_fd, data, sizeof(uint32_t));
8136 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8137 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8138 written_len += write(file_fd, data, sizeof(uint32_t));
8139 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8140 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8141 written_len += write(file_fd, data, sizeof(uint32_t));
8142 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8143 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8144 written_len += write(file_fd, data, sizeof(uint32_t));
8145 meta.tuning_mod3_data_size = 0;
8146 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8147 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8148 written_len += write(file_fd, data, sizeof(uint32_t));
8149 size_t total_size = meta.tuning_sensor_data_size;
8150 data = (void *)((uint8_t *)&meta.data);
8151 written_len += write(file_fd, data, total_size);
8152 total_size = meta.tuning_vfe_data_size;
8153 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8154 written_len += write(file_fd, data, total_size);
8155 total_size = meta.tuning_cpp_data_size;
8156 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8157 written_len += write(file_fd, data, total_size);
8158 total_size = meta.tuning_cac_data_size;
8159 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8160 written_len += write(file_fd, data, total_size);
8161 close(file_fd);
8162 }else {
8163 LOGE("fail to open file for metadata dumping");
8164 }
8165 }
8166}
8167
8168/*===========================================================================
8169 * FUNCTION : cleanAndSortStreamInfo
8170 *
8171 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8172 *              and sort them such that the raw stream is at the end of the list.
8173 *              This is a workaround for a camera daemon constraint.
8174 *
8175 * PARAMETERS : None
8176 *
8177 *==========================================================================*/
8178void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8179{
8180 List<stream_info_t *> newStreamInfo;
8181
8182 /*clean up invalid streams*/
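    // Three passes follow: drop INVALID streams (deleting their channels), move
    // all non-RAW streams into newStreamInfo, then append the remaining RAW
    // streams so that RAW always ends up at the tail of the list.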
8183 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8184 it != mStreamInfo.end();) {
8185 if(((*it)->status) == INVALID){
8186 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8187 delete channel;
8188 free(*it);
8189 it = mStreamInfo.erase(it);
8190 } else {
8191 it++;
8192 }
8193 }
8194
8195 // Move preview/video/callback/snapshot streams into newList
8196 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8197 it != mStreamInfo.end();) {
8198 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8199 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8200 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8201 newStreamInfo.push_back(*it);
8202 it = mStreamInfo.erase(it);
8203 } else
8204 it++;
8205 }
8206 // Move raw streams into newList
8207 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8208 it != mStreamInfo.end();) {
8209 newStreamInfo.push_back(*it);
8210 it = mStreamInfo.erase(it);
8211 }
8212
8213 mStreamInfo = newStreamInfo;
8214}
8215
8216/*===========================================================================
8217 * FUNCTION : extractJpegMetadata
8218 *
8219 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8220 * JPEG metadata is cached in HAL, and return as part of capture
8221 * result when metadata is returned from camera daemon.
8222 *
8223 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8224 * @request: capture request
8225 *
8226 *==========================================================================*/
8227void QCamera3HardwareInterface::extractJpegMetadata(
8228 CameraMetadata& jpegMetadata,
8229 const camera3_capture_request_t *request)
8230{
8231 CameraMetadata frame_settings;
8232 frame_settings = request->settings;
8233
8234 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8235 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8236 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8237 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8238
8239 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8240 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8241 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8242 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8243
8244 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8245 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8246 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8247 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8248
8249 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8250 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8251 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8252 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8253
8254 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8255 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8256 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8257 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8258
8259 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8260 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8261 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8262 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8263
8264 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8265 int32_t thumbnail_size[2];
8266 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8267 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8268 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8269 int32_t orientation =
8270 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8271            if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8272               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8273 int32_t temp;
8274 temp = thumbnail_size[0];
8275 thumbnail_size[0] = thumbnail_size[1];
8276 thumbnail_size[1] = temp;
8277 }
8278 }
8279 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8280 thumbnail_size,
8281 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8282 }
8283
8284}
8285
8286/*===========================================================================
8287 * FUNCTION : convertToRegions
8288 *
8289 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8290 *
8291 * PARAMETERS :
8292 * @rect : cam_rect_t struct to convert
8293 * @region : int32_t destination array
8294 * @weight : if we are converting from cam_area_t, weight is valid
8295 * else weight = -1
8296 *
8297 *==========================================================================*/
8298void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8299 int32_t *region, int weight)
8300{
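    // Example: a cam_rect_t of {left=100, top=200, width=300, height=400} with
    // weight 1 is converted to the framework tuple [100, 200, 400, 600, 1],
    // i.e. [xmin, ymin, xmax, ymax, weight].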
8301 region[0] = rect.left;
8302 region[1] = rect.top;
8303 region[2] = rect.left + rect.width;
8304 region[3] = rect.top + rect.height;
8305 if (weight > -1) {
8306 region[4] = weight;
8307 }
8308}
8309
8310/*===========================================================================
8311 * FUNCTION : convertFromRegions
8312 *
8313 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
8314 *
8315 * PARAMETERS :
8316 *   @roi            : cam_area_t destination to fill
8317 *   @frame_settings : CameraMetadata holding the region tag
8318 *   @tag            : metadata tag whose data is laid out as
8319 *                     [x_min, y_min, x_max, y_max, weight]
8320 *
8321 *==========================================================================*/
8322void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8323        const CameraMetadata &frame_settings, uint32_t tag)
8324{
8325    int32_t x_min = frame_settings.find(tag).data.i32[0];
8326 int32_t y_min = frame_settings.find(tag).data.i32[1];
8327 int32_t x_max = frame_settings.find(tag).data.i32[2];
8328 int32_t y_max = frame_settings.find(tag).data.i32[3];
8329 roi.weight = frame_settings.find(tag).data.i32[4];
8330 roi.rect.left = x_min;
8331 roi.rect.top = y_min;
8332 roi.rect.width = x_max - x_min;
8333 roi.rect.height = y_max - y_min;
8334}
8335
8336/*===========================================================================
8337 * FUNCTION : resetIfNeededROI
8338 *
8339 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8340 * crop region
8341 *
8342 * PARAMETERS :
8343 * @roi : cam_area_t struct to resize
8344 * @scalerCropRegion : cam_crop_region_t region to compare against
8345 *
8346 *
8347 *==========================================================================*/
8348bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8349 const cam_crop_region_t* scalerCropRegion)
8350{
8351 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8352 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8353 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8354 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8355
8356    /* According to the spec, weight = 0 indicates that the roi must be disabled.
8357     * Without this check, the validation below (whether the roi lies inside the
8358     * scalar crop region) would fail, the roi would never be reset, and the
8359     * algorithm would keep using a stale roi window.
8360     */
8361 if (roi->weight == 0) {
8362 return true;
8363 }
8364
8365 if ((roi_x_max < scalerCropRegion->left) ||
8366 // right edge of roi window is left of scalar crop's left edge
8367 (roi_y_max < scalerCropRegion->top) ||
8368 // bottom edge of roi window is above scalar crop's top edge
8369 (roi->rect.left > crop_x_max) ||
8370 // left edge of roi window is beyond(right) of scalar crop's right edge
8371 (roi->rect.top > crop_y_max)){
8372            // top edge of roi window is below scalar crop's bottom edge
8373 return false;
8374 }
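    // Example: with a crop region of (0, 0, 2000, 1500) and an roi of
    // (1800, 1400, 400, 300), the clamping below shrinks the roi to
    // (1800, 1400, 200, 100), i.e. the portion overlapping the crop region.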
8375 if (roi->rect.left < scalerCropRegion->left) {
8376 roi->rect.left = scalerCropRegion->left;
8377 }
8378 if (roi->rect.top < scalerCropRegion->top) {
8379 roi->rect.top = scalerCropRegion->top;
8380 }
8381 if (roi_x_max > crop_x_max) {
8382 roi_x_max = crop_x_max;
8383 }
8384 if (roi_y_max > crop_y_max) {
8385 roi_y_max = crop_y_max;
8386 }
8387 roi->rect.width = roi_x_max - roi->rect.left;
8388 roi->rect.height = roi_y_max - roi->rect.top;
8389 return true;
8390}
8391
8392/*===========================================================================
8393 * FUNCTION : convertLandmarks
8394 *
8395 * DESCRIPTION: helper method to extract the landmarks from face detection info
8396 *
8397 * PARAMETERS :
8398 * @landmark_data : input landmark data to be converted
8399 * @landmarks : int32_t destination array
8400 *
8401 *
8402 *==========================================================================*/
8403void QCamera3HardwareInterface::convertLandmarks(
8404 cam_face_landmarks_info_t landmark_data,
8405 int32_t *landmarks)
8406{
8407    if (landmark_data.is_left_eye_valid) {
8408 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8409 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8410 } else {
8411 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8412 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8413 }
8414
8415 if (landmark_data.is_right_eye_valid) {
8416 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8417 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8418 } else {
8419 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8420 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8421 }
8422
8423 if (landmark_data.is_mouth_valid) {
8424 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8425 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8426 } else {
8427 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8428 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8429 }
8430}
8431
8432/*===========================================================================
8433 * FUNCTION : setInvalidLandmarks
8434 *
8435 * DESCRIPTION: helper method to set invalid landmarks
8436 *
8437 * PARAMETERS :
8438 * @landmarks : int32_t destination array
8439 *
8440 *
8441 *==========================================================================*/
8442void QCamera3HardwareInterface::setInvalidLandmarks(
8443 int32_t *landmarks)
8444{
8445 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8446 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8447 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8448 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8449 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8450 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8451}
8452
8453#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8454
8455/*===========================================================================
8456 * FUNCTION : getCapabilities
8457 *
8458 * DESCRIPTION: query camera capability from back-end
8459 *
8460 * PARAMETERS :
8461 * @ops : mm-interface ops structure
8462 * @cam_handle : camera handle for which we need capability
8463 *
8464 * RETURN : ptr type of capability structure
8465 * capability for success
8466 * NULL for failure
8467 *==========================================================================*/
8468cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8469 uint32_t cam_handle)
8470{
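    // Flow: allocate a shared heap buffer, map it to the backend as the
    // capability buffer, have the backend fill it via query_capability(), copy
    // the result into a malloc'ed cam_capability_t, then unmap and release the
    // shared buffer on both the success and failure paths.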
8471 int rc = NO_ERROR;
8472 QCamera3HeapMemory *capabilityHeap = NULL;
8473 cam_capability_t *cap_ptr = NULL;
8474
8475 if (ops == NULL) {
8476 LOGE("Invalid arguments");
8477 return NULL;
8478 }
8479
8480 capabilityHeap = new QCamera3HeapMemory(1);
8481 if (capabilityHeap == NULL) {
8482 LOGE("creation of capabilityHeap failed");
8483 return NULL;
8484 }
8485
8486 /* Allocate memory for capability buffer */
8487 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8488 if(rc != OK) {
8489        LOGE("No memory for capability");
8490 goto allocate_failed;
8491 }
8492
8493 /* Map memory for capability buffer */
8494 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8495
8496 rc = ops->map_buf(cam_handle,
8497 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8498 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8499 if(rc < 0) {
8500 LOGE("failed to map capability buffer");
8501 rc = FAILED_TRANSACTION;
8502 goto map_failed;
8503 }
8504
8505 /* Query Capability */
8506 rc = ops->query_capability(cam_handle);
8507 if(rc < 0) {
8508 LOGE("failed to query capability");
8509 rc = FAILED_TRANSACTION;
8510 goto query_failed;
8511 }
8512
8513 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8514 if (cap_ptr == NULL) {
8515 LOGE("out of memory");
8516 rc = NO_MEMORY;
8517 goto query_failed;
8518 }
8519
8520 memset(cap_ptr, 0, sizeof(cam_capability_t));
8521 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8522
8523 int index;
8524 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8525 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8526 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8527 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8528 }
8529
8530query_failed:
8531 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8532map_failed:
8533 capabilityHeap->deallocate();
8534allocate_failed:
8535 delete capabilityHeap;
8536
8537 if (rc != NO_ERROR) {
8538 return NULL;
8539 } else {
8540 return cap_ptr;
8541 }
8542}
8543
8544/*===========================================================================
8545 * FUNCTION : initCapabilities
8546 *
8547 * DESCRIPTION: initialize camera capabilities in static data struct
8548 *
8549 * PARAMETERS :
8550 * @cameraId : camera Id
8551 *
8552 * RETURN : int32_t type of status
8553 * NO_ERROR -- success
8554 * none-zero failure code
8555 *==========================================================================*/
8556int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8557{
8558 int rc = 0;
8559 mm_camera_vtbl_t *cameraHandle = NULL;
8560    uint32_t handle = 0;
8561
8562 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8563 if (rc) {
8564 LOGE("camera_open failed. rc = %d", rc);
8565 goto open_failed;
8566 }
8567 if (!cameraHandle) {
8568 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8569 goto open_failed;
8570 }
8571
8572    handle = get_main_camera_handle(cameraHandle->camera_handle);
8573 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8574 if (gCamCapability[cameraId] == NULL) {
8575 rc = FAILED_TRANSACTION;
8576 goto failed_op;
8577    }
8578
8579    gCamCapability[cameraId]->camera_index = cameraId;
8580    if (is_dual_camera_by_idx(cameraId)) {
8581 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8582 gCamCapability[cameraId]->aux_cam_cap =
8583 getCapabilities(cameraHandle->ops, handle);
8584 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8585 rc = FAILED_TRANSACTION;
8586 free(gCamCapability[cameraId]);
8587 goto failed_op;
8588 }
8589
8590 // Copy the main camera capability to main_cam_cap struct
8591 gCamCapability[cameraId]->main_cam_cap =
8592 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8593 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8594 LOGE("out of memory");
8595 rc = NO_MEMORY;
8596 goto failed_op;
8597 }
8598 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8599 sizeof(cam_capability_t));
8600    }
8601failed_op:
8602    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8603 cameraHandle = NULL;
8604open_failed:
8605 return rc;
8606}
8607
8608/*==========================================================================
8609 * FUNCTION : get3Aversion
8610 *
8611 * DESCRIPTION: get the Q3A S/W version
8612 *
8613 * PARAMETERS :
8614 * @sw_version: Reference of Q3A structure which will hold version info upon
8615 * return
8616 *
8617 * RETURN : None
8618 *
8619 *==========================================================================*/
8620void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8621{
8622 if(gCamCapability[mCameraId])
8623 sw_version = gCamCapability[mCameraId]->q3a_version;
8624 else
8625 LOGE("Capability structure NULL!");
8626}
8627
8628
8629/*===========================================================================
8630 * FUNCTION : initParameters
8631 *
8632 * DESCRIPTION: initialize camera parameters
8633 *
8634 * PARAMETERS :
8635 *
8636 * RETURN : int32_t type of status
8637 * NO_ERROR -- success
8638 * none-zero failure code
8639 *==========================================================================*/
8640int QCamera3HardwareInterface::initParameters()
8641{
8642 int rc = 0;
8643
8644 //Allocate Set Param Buffer
8645 mParamHeap = new QCamera3HeapMemory(1);
8646 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8647 if(rc != OK) {
8648 rc = NO_MEMORY;
8649 LOGE("Failed to allocate SETPARM Heap memory");
8650 delete mParamHeap;
8651 mParamHeap = NULL;
8652 return rc;
8653 }
8654
8655 //Map memory for parameters buffer
8656 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8657 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8658 mParamHeap->getFd(0),
8659 sizeof(metadata_buffer_t),
8660 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8661 if(rc < 0) {
8662 LOGE("failed to map SETPARM buffer");
8663 rc = FAILED_TRANSACTION;
8664 mParamHeap->deallocate();
8665 delete mParamHeap;
8666 mParamHeap = NULL;
8667 return rc;
8668 }
8669
8670 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8671
8672 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8673 return rc;
8674}
8675
8676/*===========================================================================
8677 * FUNCTION : deinitParameters
8678 *
8679 * DESCRIPTION: de-initialize camera parameters
8680 *
8681 * PARAMETERS :
8682 *
8683 * RETURN : NONE
8684 *==========================================================================*/
8685void QCamera3HardwareInterface::deinitParameters()
8686{
8687 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8688 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8689
8690 mParamHeap->deallocate();
8691 delete mParamHeap;
8692 mParamHeap = NULL;
8693
8694 mParameters = NULL;
8695
8696 free(mPrevParameters);
8697 mPrevParameters = NULL;
8698}
8699
8700/*===========================================================================
8701 * FUNCTION : calcMaxJpegSize
8702 *
8703 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8704 *
8705 * PARAMETERS :
8706 *
8707 * RETURN : max_jpeg_size
8708 *==========================================================================*/
8709size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8710{
8711 size_t max_jpeg_size = 0;
8712 size_t temp_width, temp_height;
8713 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8714 MAX_SIZES_CNT);
8715 for (size_t i = 0; i < count; i++) {
8716 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8717 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8718 if (temp_width * temp_height > max_jpeg_size ) {
8719 max_jpeg_size = temp_width * temp_height;
8720 }
8721 }
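    // Note: the 3/2 factor below appears to budget for a worst-case 4:2:0
    // (1.5 bytes per pixel) encode of the largest picture size, with room for
    // the camera3_jpeg_blob_t transport header appended to the JPEG buffer.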
8722 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8723 return max_jpeg_size;
8724}
8725
8726/*===========================================================================
8727 * FUNCTION : getMaxRawSize
8728 *
8729 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8730 *
8731 * PARAMETERS :
8732 *
8733 * RETURN : Largest supported Raw Dimension
8734 *==========================================================================*/
8735cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8736{
8737 int max_width = 0;
8738 cam_dimension_t maxRawSize;
8739
8740 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8741 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8742 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8743 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8744 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8745 }
8746 }
8747 return maxRawSize;
8748}
8749
8750
8751/*===========================================================================
8752 * FUNCTION : calcMaxJpegDim
8753 *
8754 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8755 *
8756 * PARAMETERS :
8757 *
8758 * RETURN : max_jpeg_dim
8759 *==========================================================================*/
8760cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8761{
8762 cam_dimension_t max_jpeg_dim;
8763 cam_dimension_t curr_jpeg_dim;
8764 max_jpeg_dim.width = 0;
8765 max_jpeg_dim.height = 0;
8766 curr_jpeg_dim.width = 0;
8767 curr_jpeg_dim.height = 0;
8768 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8769 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8770 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8771 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8772 max_jpeg_dim.width * max_jpeg_dim.height ) {
8773 max_jpeg_dim.width = curr_jpeg_dim.width;
8774 max_jpeg_dim.height = curr_jpeg_dim.height;
8775 }
8776 }
8777 return max_jpeg_dim;
8778}
8779
8780/*===========================================================================
8781 * FUNCTION : addStreamConfig
8782 *
8783 * DESCRIPTION: adds the stream configuration to the array
8784 *
8785 * PARAMETERS :
8786 * @available_stream_configs : pointer to stream configuration array
8787 * @scalar_format : scalar format
8788 * @dim : configuration dimension
8789 * @config_type : input or output configuration type
8790 *
8791 * RETURN : NONE
8792 *==========================================================================*/
8793void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8794 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8795{
8796 available_stream_configs.add(scalar_format);
8797 available_stream_configs.add(dim.width);
8798 available_stream_configs.add(dim.height);
8799 available_stream_configs.add(config_type);
8800}
8801
8802/*===========================================================================
8803 * FUNCTION   : supportBurstCapture
8804 *
8805 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8806 *
8807 * PARAMETERS :
8808 * @cameraId : camera Id
8809 *
8810 * RETURN : true if camera supports BURST_CAPTURE
8811 * false otherwise
8812 *==========================================================================*/
8813bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8814{
8815 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8816 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8817 const int32_t highResWidth = 3264;
8818 const int32_t highResHeight = 2448;
8819
8820 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8821 // Maximum resolution images cannot be captured at >= 10fps
8822 // -> not supporting BURST_CAPTURE
8823 return false;
8824 }
8825
8826 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8827 // Maximum resolution images can be captured at >= 20fps
8828 // --> supporting BURST_CAPTURE
8829 return true;
8830 }
8831
8832 // Find the smallest highRes resolution, or largest resolution if there is none
8833 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8834 MAX_SIZES_CNT);
8835 size_t highRes = 0;
8836 while ((highRes + 1 < totalCnt) &&
8837 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8838 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8839 highResWidth * highResHeight)) {
8840 highRes++;
8841 }
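    // Note: assuming picture_sizes_tbl is sorted by descending area, the loop
    // above leaves highRes at the smallest entry that is still at least
    // 3264x2448, or at the largest entry if none qualifies; its minimum frame
    // duration then decides whether BURST_CAPTURE is advertised.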
8842 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8843 return true;
8844 } else {
8845 return false;
8846 }
8847}
8848
8849/*===========================================================================
8850 * FUNCTION   : getPDStatIndex
8851 *
8852 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8853 *
8854 * PARAMETERS :
8855 * @caps : camera capabilities
8856 *
8857 * RETURN : int32_t type
8858 * non-negative - on success
8859 * -1 - on failure
8860 *==========================================================================*/
8861int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8862 if (nullptr == caps) {
8863 return -1;
8864 }
8865
8866 uint32_t metaRawCount = caps->meta_raw_channel_count;
8867 int32_t ret = -1;
8868 for (size_t i = 0; i < metaRawCount; i++) {
8869 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8870 ret = i;
8871 break;
8872 }
8873 }
8874
8875 return ret;
8876}
8877
8878/*===========================================================================
8879 * FUNCTION   : initStaticMetadata
8880 *
8881 * DESCRIPTION: initialize the static metadata
8882 *
8883 * PARAMETERS :
8884 * @cameraId : camera Id
8885 *
8886 * RETURN : int32_t type of status
8887 * 0 -- success
8888 * non-zero failure code
8889 *==========================================================================*/
8890int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8891{
8892 int rc = 0;
8893 CameraMetadata staticInfo;
8894 size_t count = 0;
8895 bool limitedDevice = false;
8896 char prop[PROPERTY_VALUE_MAX];
8897 bool supportBurst = false;
8898
8899 supportBurst = supportBurstCapture(cameraId);
8900
8901 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8902 * guaranteed or if min fps of max resolution is less than 20 fps, its
8903     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
8904     * advertised as a limited device */
8905 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8906 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8907 !supportBurst;
8908
8909 uint8_t supportedHwLvl = limitedDevice ?
8910 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
8911#ifndef USE_HAL_3_3
8912            // LEVEL_3 - This device will support level 3.
8913            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8914#else
8915            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
8916#endif
8917
8918 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8919 &supportedHwLvl, 1);
8920
8921 bool facingBack = false;
8922 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8923 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8924 facingBack = true;
8925 }
8926 /*HAL 3 only*/
8927 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8928 &gCamCapability[cameraId]->min_focus_distance, 1);
8929
8930 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8931 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8932
8933 /*should be using focal lengths but sensor doesn't provide that info now*/
8934 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8935 &gCamCapability[cameraId]->focal_length,
8936 1);
8937
8938 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8939 gCamCapability[cameraId]->apertures,
8940 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8941
8942 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8943 gCamCapability[cameraId]->filter_densities,
8944 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8945
8946
8947    uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8948 size_t mode_count =
8949 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8950 for (size_t i = 0; i < mode_count; i++) {
8951 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8952 }
8953    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8954            available_opt_stab_modes, mode_count);
8955
8956 int32_t lens_shading_map_size[] = {
8957 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8958 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8959 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8960 lens_shading_map_size,
8961 sizeof(lens_shading_map_size)/sizeof(int32_t));
8962
8963 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8964 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8965
8966 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8967 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8968
8969 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8970 &gCamCapability[cameraId]->max_frame_duration, 1);
8971
8972 camera_metadata_rational baseGainFactor = {
8973 gCamCapability[cameraId]->base_gain_factor.numerator,
8974 gCamCapability[cameraId]->base_gain_factor.denominator};
8975 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8976 &baseGainFactor, 1);
8977
8978 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8979 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8980
8981 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8982 gCamCapability[cameraId]->pixel_array_size.height};
8983 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8984 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8985
8986 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8987 gCamCapability[cameraId]->active_array_size.top,
8988 gCamCapability[cameraId]->active_array_size.width,
8989 gCamCapability[cameraId]->active_array_size.height};
8990 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8991 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8992
8993 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8994 &gCamCapability[cameraId]->white_level, 1);
8995
8996    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8997    adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8998            gCamCapability[cameraId]->color_arrangement);
8999    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9000            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9001
9002#ifndef USE_HAL_3_3
9003 bool hasBlackRegions = false;
9004 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9005 LOGW("black_region_count: %d is bounded to %d",
9006 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9007 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9008 }
9009 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9010 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9011 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9012 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9013 }
9014 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9015 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9016 hasBlackRegions = true;
9017 }
9018#endif
9019    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9020 &gCamCapability[cameraId]->flash_charge_duration, 1);
9021
9022 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9023 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9024
9025    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9026            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9027            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9028    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9029 &timestampSource, 1);
9030
9031    //update histogram vendor data
9032    staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9033            &gCamCapability[cameraId]->histogram_size, 1);
9034
9035    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9036            &gCamCapability[cameraId]->max_histogram_count, 1);
9037
9038    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9039 //so that app can request fewer number of bins than the maximum supported.
9040 std::vector<int32_t> histBins;
9041 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9042 histBins.push_back(maxHistBins);
9043 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9044 (maxHistBins & 0x1) == 0) {
9045 histBins.push_back(maxHistBins >> 1);
9046 maxHistBins >>= 1;
9047 }
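    // Example: if max_histogram_count is 256 and MIN_CAM_HISTOGRAM_STATS_SIZE is
    // 64, histBins becomes {256, 128, 64}; the halving stops at the minimum
    // supported size or as soon as the bin count turns odd.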
9048 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9049 histBins.data(), histBins.size());
9050
9051    int32_t sharpness_map_size[] = {
9052 gCamCapability[cameraId]->sharpness_map_size.width,
9053 gCamCapability[cameraId]->sharpness_map_size.height};
9054
9055 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9056 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9057
9058 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9059 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9060
9061    int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9062 if (0 <= indexPD) {
9063 // Advertise PD stats data as part of the Depth capabilities
9064 int32_t depthWidth =
9065 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9066 int32_t depthHeight =
9067 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9068 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9069 assert(0 < depthSamplesCount);
9070 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9071 &depthSamplesCount, 1);
9072
9073 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9074 depthHeight,
9075 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9076 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9077 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9078 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9079 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9080
9081 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9082 depthHeight, 33333333,
9083 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9084 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9085 depthMinDuration,
9086 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9087
9088 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9089 depthHeight, 0,
9090 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9091 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9092 depthStallDuration,
9093 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9094
9095 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9096 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9097 }
9098
9099    int32_t scalar_formats[] = {
9100 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9101 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9102 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9103 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9104 HAL_PIXEL_FORMAT_RAW10,
9105 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9106    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9107    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9108            scalar_formats_count);
9109
9110 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9111 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9112 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9113 count, MAX_SIZES_CNT, available_processed_sizes);
9114 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9115 available_processed_sizes, count * 2);
9116
9117 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9118 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9119 makeTable(gCamCapability[cameraId]->raw_dim,
9120 count, MAX_SIZES_CNT, available_raw_sizes);
9121 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9122 available_raw_sizes, count * 2);
9123
9124 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9125 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9126 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9127 count, MAX_SIZES_CNT, available_fps_ranges);
9128 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9129 available_fps_ranges, count * 2);
9130
9131 camera_metadata_rational exposureCompensationStep = {
9132 gCamCapability[cameraId]->exp_compensation_step.numerator,
9133 gCamCapability[cameraId]->exp_compensation_step.denominator};
9134 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9135 &exposureCompensationStep, 1);
9136
9137 Vector<uint8_t> availableVstabModes;
9138 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9139 char eis_prop[PROPERTY_VALUE_MAX];
9140    bool eisSupported = false;
9141    memset(eis_prop, 0, sizeof(eis_prop));
9142    property_get("persist.camera.eis.enable", eis_prop, "1");
9143    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9144    count = IS_TYPE_MAX;
9145 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9146 for (size_t i = 0; i < count; i++) {
9147 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9148 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9149 eisSupported = true;
9150 break;
9151 }
9152 }
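    // Video stabilization ON is advertised only when the sensor is back-facing,
    // persist.camera.eis.enable is set, and the backend reports EIS 2.0 or
    // EIS 3.0 support; otherwise only OFF is listed.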
9153 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009154 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9155 }
9156 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9157 availableVstabModes.array(), availableVstabModes.size());
9158
9159 /*HAL 1 and HAL 3 common*/
9160 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9161 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9162 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9163    // Cap the max zoom to the max preferred value
9164    float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9165    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9166 &maxZoom, 1);
9167
9168 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9169 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9170
9171 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9172 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9173 max3aRegions[2] = 0; /* AF not supported */
9174 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9175 max3aRegions, 3);
9176
9177 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9178 memset(prop, 0, sizeof(prop));
9179 property_get("persist.camera.facedetect", prop, "1");
9180 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9181 LOGD("Support face detection mode: %d",
9182 supportedFaceDetectMode);
9183
9184 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9185    /* supported mode should be OFF if the max number of faces is 0 */
9186    if (maxFaces <= 0) {
9187        supportedFaceDetectMode = 0;
9188    }
9189    Vector<uint8_t> availableFaceDetectModes;
9190 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9191 if (supportedFaceDetectMode == 1) {
9192 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9193 } else if (supportedFaceDetectMode == 2) {
9194 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9195 } else if (supportedFaceDetectMode == 3) {
9196 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9197 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9198 } else {
9199 maxFaces = 0;
9200 }
9201 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9202 availableFaceDetectModes.array(),
9203 availableFaceDetectModes.size());
9204 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9205 (int32_t *)&maxFaces, 1);
9206    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9207    staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9208            &face_bsgc, 1);
9209
9210 int32_t exposureCompensationRange[] = {
9211 gCamCapability[cameraId]->exposure_compensation_min,
9212 gCamCapability[cameraId]->exposure_compensation_max};
9213 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9214 exposureCompensationRange,
9215 sizeof(exposureCompensationRange)/sizeof(int32_t));
9216
9217 uint8_t lensFacing = (facingBack) ?
9218 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9219 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9220
9221 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9222 available_thumbnail_sizes,
9223 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9224
9225 /*all sizes will be clubbed into this tag*/
9226 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9227 /*android.scaler.availableStreamConfigurations*/
9228 Vector<int32_t> available_stream_configs;
9229 cam_dimension_t active_array_dim;
9230 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9231 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9232
9233 /*advertise list of input dimensions supported based on below property.
9234    By default all sizes up to 5MP will be advertised.
9235 Note that the setprop resolution format should be WxH.
9236 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9237 To list all supported sizes, setprop needs to be set with "0x0" */
9238 cam_dimension_t minInputSize = {2592,1944}; //5MP
9239 memset(prop, 0, sizeof(prop));
9240 property_get("persist.camera.input.minsize", prop, "2592x1944");
9241 if (strlen(prop) > 0) {
9242 char *saveptr = NULL;
9243 char *token = strtok_r(prop, "x", &saveptr);
9244 if (token != NULL) {
9245 minInputSize.width = atoi(token);
9246 }
9247 token = strtok_r(NULL, "x", &saveptr);
9248 if (token != NULL) {
9249 minInputSize.height = atoi(token);
9250 }
9251 }
9252
9253    /* Add input/output stream configurations for each scalar format */
9254 for (size_t j = 0; j < scalar_formats_count; j++) {
9255 switch (scalar_formats[j]) {
9256 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9257 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9258 case HAL_PIXEL_FORMAT_RAW10:
9259 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9260 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9261 addStreamConfig(available_stream_configs, scalar_formats[j],
9262 gCamCapability[cameraId]->raw_dim[i],
9263 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9264 }
9265 break;
9266 case HAL_PIXEL_FORMAT_BLOB:
9267 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9268 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9269 addStreamConfig(available_stream_configs, scalar_formats[j],
9270 gCamCapability[cameraId]->picture_sizes_tbl[i],
9271 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9272 }
9273 break;
9274 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9275 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9276 default:
9277 cam_dimension_t largest_picture_size;
9278 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9279 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9280 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9281 addStreamConfig(available_stream_configs, scalar_formats[j],
9282 gCamCapability[cameraId]->picture_sizes_tbl[i],
9283 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009284 /* For the two formats below we also support input streams for reprocessing; advertise those */
9285 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9286 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9287 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9288 >= minInputSize.width) || (gCamCapability[cameraId]->
9289 picture_sizes_tbl[i].height >= minInputSize.height)) {
9290 addStreamConfig(available_stream_configs, scalar_formats[j],
9291 gCamCapability[cameraId]->picture_sizes_tbl[i],
9292 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9293 }
9294 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009296
Thierry Strudel3d639192016-09-09 11:52:26 -07009297 break;
9298 }
9299 }
9300
9301 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9302 available_stream_configs.array(), available_stream_configs.size());
9303 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9304 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9305
9306 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9307 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9308
9309 /* android.scaler.availableMinFrameDurations */
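 /* Each entry below is a (format, width, height, min_frame_duration) tuple,
  * with the duration in nanoseconds, as expected by
  * ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS. */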
9310 Vector<int64_t> available_min_durations;
9311 for (size_t j = 0; j < scalar_formats_count; j++) {
9312 switch (scalar_formats[j]) {
9313 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9314 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9315 case HAL_PIXEL_FORMAT_RAW10:
9316 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9317 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9318 available_min_durations.add(scalar_formats[j]);
9319 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9320 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9321 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9322 }
9323 break;
9324 default:
9325 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9326 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9327 available_min_durations.add(scalar_formats[j]);
9328 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9329 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9330 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9331 }
9332 break;
9333 }
9334 }
9335 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9336 available_min_durations.array(), available_min_durations.size());
9337
9338 Vector<int32_t> available_hfr_configs;
9339 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9340 int32_t fps = 0;
9341 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9342 case CAM_HFR_MODE_60FPS:
9343 fps = 60;
9344 break;
9345 case CAM_HFR_MODE_90FPS:
9346 fps = 90;
9347 break;
9348 case CAM_HFR_MODE_120FPS:
9349 fps = 120;
9350 break;
9351 case CAM_HFR_MODE_150FPS:
9352 fps = 150;
9353 break;
9354 case CAM_HFR_MODE_180FPS:
9355 fps = 180;
9356 break;
9357 case CAM_HFR_MODE_210FPS:
9358 fps = 210;
9359 break;
9360 case CAM_HFR_MODE_240FPS:
9361 fps = 240;
9362 break;
9363 case CAM_HFR_MODE_480FPS:
9364 fps = 480;
9365 break;
9366 case CAM_HFR_MODE_OFF:
9367 case CAM_HFR_MODE_MAX:
9368 default:
9369 break;
9370 }
9371
9372 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9373 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9374 /* For each HFR frame rate, we need to advertise one variable fps range
9375 * and one fixed fps range per dimension. E.g.: for 120 FPS, advertise [30, 120]
9376 * and [120, 120]. While camcorder preview alone is running, the app sets
9377 * [30, 120]; when video recording starts, [120, 120] is set. This way the
9378 * sensor configuration does not change when recording starts. */
9380
9381 /* (width, height, fps_min, fps_max, batch_size_max) */
9382 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9383 j < MAX_SIZES_CNT; j++) {
9384 available_hfr_configs.add(
9385 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9386 available_hfr_configs.add(
9387 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9388 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9389 available_hfr_configs.add(fps);
9390 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9391
9392 /* (width, height, fps_min, fps_max, batch_size_max) */
9393 available_hfr_configs.add(
9394 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9395 available_hfr_configs.add(
9396 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9397 available_hfr_configs.add(fps);
9398 available_hfr_configs.add(fps);
9399 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9400 }
9401 }
9402 }
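 /* Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30): a 1920x1080
  * HFR table entry for 120 fps adds two configs,
  * (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4), where the last
  * element is the max batch size (fps / PREVIEW_FPS_FOR_HFR). */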
9403 //Advertise HFR capability only if the property is set
9404 memset(prop, 0, sizeof(prop));
9405 property_get("persist.camera.hal3hfr.enable", prop, "1");
9406 uint8_t hfrEnable = (uint8_t)atoi(prop);
9407
9408 if(hfrEnable && available_hfr_configs.array()) {
9409 staticInfo.update(
9410 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9411 available_hfr_configs.array(), available_hfr_configs.size());
9412 }
9413
9414 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9415 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9416 &max_jpeg_size, 1);
9417
9418 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9419 size_t size = 0;
9420 count = CAM_EFFECT_MODE_MAX;
9421 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9422 for (size_t i = 0; i < count; i++) {
9423 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9424 gCamCapability[cameraId]->supported_effects[i]);
9425 if (NAME_NOT_FOUND != val) {
9426 avail_effects[size] = (uint8_t)val;
9427 size++;
9428 }
9429 }
9430 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9431 avail_effects,
9432 size);
9433
9434 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9435 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9436 size_t supported_scene_modes_cnt = 0;
9437 count = CAM_SCENE_MODE_MAX;
9438 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9439 for (size_t i = 0; i < count; i++) {
9440 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9441 CAM_SCENE_MODE_OFF) {
9442 int val = lookupFwkName(SCENE_MODES_MAP,
9443 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9444 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009445
Thierry Strudel3d639192016-09-09 11:52:26 -07009446 if (NAME_NOT_FOUND != val) {
9447 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9448 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9449 supported_scene_modes_cnt++;
9450 }
9451 }
9452 }
9453 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9454 avail_scene_modes,
9455 supported_scene_modes_cnt);
9456
9457 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9458 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9459 supported_scene_modes_cnt,
9460 CAM_SCENE_MODE_MAX,
9461 scene_mode_overrides,
9462 supported_indexes,
9463 cameraId);
9464
9465 if (supported_scene_modes_cnt == 0) {
9466 supported_scene_modes_cnt = 1;
9467 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9468 }
9469
9470 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9471 scene_mode_overrides, supported_scene_modes_cnt * 3);
9472
9473 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9474 ANDROID_CONTROL_MODE_AUTO,
9475 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9476 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9477 available_control_modes,
9478 3);
9479
9480 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9481 size = 0;
9482 count = CAM_ANTIBANDING_MODE_MAX;
9483 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9484 for (size_t i = 0; i < count; i++) {
9485 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9486 gCamCapability[cameraId]->supported_antibandings[i]);
9487 if (NAME_NOT_FOUND != val) {
9488 avail_antibanding_modes[size] = (uint8_t)val;
9489 size++;
9490 }
9491
9492 }
9493 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9494 avail_antibanding_modes,
9495 size);
9496
9497 uint8_t avail_abberation_modes[] = {
9498 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9499 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9500 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9501 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9502 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9503 if (0 == count) {
9504 // If no aberration correction modes are available for a device, advertise only the OFF mode
9505 size = 1;
9506 } else {
9507 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9508 // so advertise all 3 modes if at least one mode is supported, as per the
9509 // new M requirement
9510 size = 3;
9511 }
9512 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9513 avail_abberation_modes,
9514 size);
9515
9516 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9517 size = 0;
9518 count = CAM_FOCUS_MODE_MAX;
9519 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9520 for (size_t i = 0; i < count; i++) {
9521 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9522 gCamCapability[cameraId]->supported_focus_modes[i]);
9523 if (NAME_NOT_FOUND != val) {
9524 avail_af_modes[size] = (uint8_t)val;
9525 size++;
9526 }
9527 }
9528 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9529 avail_af_modes,
9530 size);
9531
9532 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9533 size = 0;
9534 count = CAM_WB_MODE_MAX;
9535 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9536 for (size_t i = 0; i < count; i++) {
9537 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9538 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9539 gCamCapability[cameraId]->supported_white_balances[i]);
9540 if (NAME_NOT_FOUND != val) {
9541 avail_awb_modes[size] = (uint8_t)val;
9542 size++;
9543 }
9544 }
9545 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9546 avail_awb_modes,
9547 size);
9548
9549 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9550 count = CAM_FLASH_FIRING_LEVEL_MAX;
9551 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9552 count);
9553 for (size_t i = 0; i < count; i++) {
9554 available_flash_levels[i] =
9555 gCamCapability[cameraId]->supported_firing_levels[i];
9556 }
9557 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9558 available_flash_levels, count);
9559
9560 uint8_t flashAvailable;
9561 if (gCamCapability[cameraId]->flash_available)
9562 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9563 else
9564 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9565 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9566 &flashAvailable, 1);
9567
9568 Vector<uint8_t> avail_ae_modes;
9569 count = CAM_AE_MODE_MAX;
9570 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9571 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009572 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9573 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9574 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9575 }
9576 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009577 }
9578 if (flashAvailable) {
9579 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9580 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009581 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009582 }
9583 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9584 avail_ae_modes.array(),
9585 avail_ae_modes.size());
9586
9587 int32_t sensitivity_range[2];
9588 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9589 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9590 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9591 sensitivity_range,
9592 sizeof(sensitivity_range) / sizeof(int32_t));
9593
9594 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9595 &gCamCapability[cameraId]->max_analog_sensitivity,
9596 1);
9597
9598 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9599 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9600 &sensor_orientation,
9601 1);
9602
9603 int32_t max_output_streams[] = {
9604 MAX_STALLING_STREAMS,
9605 MAX_PROCESSED_STREAMS,
9606 MAX_RAW_STREAMS};
9607 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9608 max_output_streams,
9609 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9610
9611 uint8_t avail_leds = 0;
9612 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9613 &avail_leds, 0);
9614
9615 uint8_t focus_dist_calibrated;
9616 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9617 gCamCapability[cameraId]->focus_dist_calibrated);
9618 if (NAME_NOT_FOUND != val) {
9619 focus_dist_calibrated = (uint8_t)val;
9620 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9621 &focus_dist_calibrated, 1);
9622 }
9623
9624 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9625 size = 0;
9626 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9627 MAX_TEST_PATTERN_CNT);
9628 for (size_t i = 0; i < count; i++) {
9629 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9630 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9631 if (NAME_NOT_FOUND != testpatternMode) {
9632 avail_testpattern_modes[size] = testpatternMode;
9633 size++;
9634 }
9635 }
9636 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9637 avail_testpattern_modes,
9638 size);
9639
9640 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9641 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9642 &max_pipeline_depth,
9643 1);
9644
9645 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9646 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9647 &partial_result_count,
9648 1);
9649
9650 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9651 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9652
9653 Vector<uint8_t> available_capabilities;
9654 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9655 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9656 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9657 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9658 if (supportBurst) {
9659 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9660 }
9661 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9662 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9663 if (hfrEnable && available_hfr_configs.array()) {
9664 available_capabilities.add(
9665 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9666 }
9667
9668 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9669 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9670 }
9671 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9672 available_capabilities.array(),
9673 available_capabilities.size());
9674
9675 //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9676 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9677 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9678 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9679
9680 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9681 &aeLockAvailable, 1);
9682
9683 //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9684 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9685 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9686 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9687
9688 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9689 &awbLockAvailable, 1);
9690
9691 int32_t max_input_streams = 1;
9692 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9693 &max_input_streams,
9694 1);
9695
9696 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9697 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9698 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9699 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9700 HAL_PIXEL_FORMAT_YCbCr_420_888};
9701 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9702 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9703
9704 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9705 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9706 &max_latency,
9707 1);
9708
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009709#ifndef USE_HAL_3_3
9710 int32_t isp_sensitivity_range[2];
9711 isp_sensitivity_range[0] =
9712 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9713 isp_sensitivity_range[1] =
9714 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9715 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9716 isp_sensitivity_range,
9717 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9718#endif
9719
Thierry Strudel3d639192016-09-09 11:52:26 -07009720 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9721 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9722 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9723 available_hot_pixel_modes,
9724 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9725
9726 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9727 ANDROID_SHADING_MODE_FAST,
9728 ANDROID_SHADING_MODE_HIGH_QUALITY};
9729 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9730 available_shading_modes,
9731 3);
9732
9733 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9734 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9735 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9736 available_lens_shading_map_modes,
9737 2);
9738
9739 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9740 ANDROID_EDGE_MODE_FAST,
9741 ANDROID_EDGE_MODE_HIGH_QUALITY,
9742 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9743 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9744 available_edge_modes,
9745 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9746
9747 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9748 ANDROID_NOISE_REDUCTION_MODE_FAST,
9749 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9750 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9751 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9752 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9753 available_noise_red_modes,
9754 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9755
9756 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9757 ANDROID_TONEMAP_MODE_FAST,
9758 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9759 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9760 available_tonemap_modes,
9761 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9762
9763 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9764 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9765 available_hot_pixel_map_modes,
9766 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9767
9768 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9769 gCamCapability[cameraId]->reference_illuminant1);
9770 if (NAME_NOT_FOUND != val) {
9771 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9772 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9773 }
9774
9775 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9776 gCamCapability[cameraId]->reference_illuminant2);
9777 if (NAME_NOT_FOUND != val) {
9778 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9779 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9780 }
9781
9782 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9783 (void *)gCamCapability[cameraId]->forward_matrix1,
9784 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9785
9786 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9787 (void *)gCamCapability[cameraId]->forward_matrix2,
9788 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9789
9790 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9791 (void *)gCamCapability[cameraId]->color_transform1,
9792 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9793
9794 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9795 (void *)gCamCapability[cameraId]->color_transform2,
9796 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9797
9798 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9799 (void *)gCamCapability[cameraId]->calibration_transform1,
9800 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9801
9802 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9803 (void *)gCamCapability[cameraId]->calibration_transform2,
9804 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9805
9806 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9807 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9808 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9809 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9810 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9811 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9812 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9813 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9814 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9815 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9816 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9817 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9818 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9819 ANDROID_JPEG_GPS_COORDINATES,
9820 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9821 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9822 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9823 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9824 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9825 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9826 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9827 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9828 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9829 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009830#ifndef USE_HAL_3_3
9831 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9832#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009833 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009834 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009835 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9836 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009837 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009838 /* DevCamDebug metadata request_keys_basic */
9839 DEVCAMDEBUG_META_ENABLE,
9840 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009841 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009842 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9843 NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV
Samuel Ha68ba5172016-12-15 18:41:12 -08009844 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009845
9846 size_t request_keys_cnt =
9847 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9848 Vector<int32_t> available_request_keys;
9849 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9850 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9851 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9852 }
9853
9854 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9855 available_request_keys.array(), available_request_keys.size());
9856
9857 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9858 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9859 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9860 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9861 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9862 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9863 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9864 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9865 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9866 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9867 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9868 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9869 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9870 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9871 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9872 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9873 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009874 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009875 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9876 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9877 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009878 ANDROID_STATISTICS_FACE_SCORES,
9879#ifndef USE_HAL_3_3
9880 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9881#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009882 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009883 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009884 // DevCamDebug metadata result_keys_basic
9885 DEVCAMDEBUG_META_ENABLE,
9886 // DevCamDebug metadata result_keys AF
9887 DEVCAMDEBUG_AF_LENS_POSITION,
9888 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9889 DEVCAMDEBUG_AF_TOF_DISTANCE,
9890 DEVCAMDEBUG_AF_LUMA,
9891 DEVCAMDEBUG_AF_HAF_STATE,
9892 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9893 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9894 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9895 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9896 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9897 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9898 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9899 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9900 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9901 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9902 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9903 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9904 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9905 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9906 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9907 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9908 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9909 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9910 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9911 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9912 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9913 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9914 // DevCamDebug metadata result_keys AEC
9915 DEVCAMDEBUG_AEC_TARGET_LUMA,
9916 DEVCAMDEBUG_AEC_COMP_LUMA,
9917 DEVCAMDEBUG_AEC_AVG_LUMA,
9918 DEVCAMDEBUG_AEC_CUR_LUMA,
9919 DEVCAMDEBUG_AEC_LINECOUNT,
9920 DEVCAMDEBUG_AEC_REAL_GAIN,
9921 DEVCAMDEBUG_AEC_EXP_INDEX,
9922 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009923 // DevCamDebug metadata result_keys zzHDR
9924 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9925 DEVCAMDEBUG_AEC_L_LINECOUNT,
9926 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9927 DEVCAMDEBUG_AEC_S_LINECOUNT,
9928 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9929 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9930 // DevCamDebug metadata result_keys ADRC
9931 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9932 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9933 DEVCAMDEBUG_AEC_GTM_RATIO,
9934 DEVCAMDEBUG_AEC_LTM_RATIO,
9935 DEVCAMDEBUG_AEC_LA_RATIO,
9936 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009937 // DevCamDebug metadata result_keys AWB
9938 DEVCAMDEBUG_AWB_R_GAIN,
9939 DEVCAMDEBUG_AWB_G_GAIN,
9940 DEVCAMDEBUG_AWB_B_GAIN,
9941 DEVCAMDEBUG_AWB_CCT,
9942 DEVCAMDEBUG_AWB_DECISION,
9943 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009944 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9945 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9946 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009947 };
9948
Thierry Strudel3d639192016-09-09 11:52:26 -07009949 size_t result_keys_cnt =
9950 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9951
9952 Vector<int32_t> available_result_keys;
9953 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9954 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9955 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9956 }
9957 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9958 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9959 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9960 }
9961 if (supportedFaceDetectMode == 1) {
9962 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9963 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9964 } else if ((supportedFaceDetectMode == 2) ||
9965 (supportedFaceDetectMode == 3)) {
9966 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9967 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009969#ifndef USE_HAL_3_3
9970 if (hasBlackRegions) {
9971 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9972 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9973 }
9974#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009975 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9976 available_result_keys.array(), available_result_keys.size());
9977
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009978 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009979 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9980 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9981 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9982 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9983 ANDROID_SCALER_CROPPING_TYPE,
9984 ANDROID_SYNC_MAX_LATENCY,
9985 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9986 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9987 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9988 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9989 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9990 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9991 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9992 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9993 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9994 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9995 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9996 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9997 ANDROID_LENS_FACING,
9998 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9999 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10000 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10001 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10002 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10003 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10004 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10005 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10006 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10007 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10008 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10009 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10010 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10011 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10012 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10013 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10014 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10015 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10016 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10017 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010018 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010019 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10020 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10021 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10022 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10023 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10024 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10025 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10026 ANDROID_CONTROL_AVAILABLE_MODES,
10027 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10028 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10029 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10030 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010031 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10032#ifndef USE_HAL_3_3
10033 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10034 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10035#endif
10036 };
10037
10038 Vector<int32_t> available_characteristics_keys;
10039 available_characteristics_keys.appendArray(characteristics_keys_basic,
10040 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10041#ifndef USE_HAL_3_3
10042 if (hasBlackRegions) {
10043 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10044 }
10045#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010046
10047 if (0 <= indexPD) {
10048 int32_t depthKeys[] = {
10049 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10050 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10051 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10052 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10053 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10054 };
10055 available_characteristics_keys.appendArray(depthKeys,
10056 sizeof(depthKeys) / sizeof(depthKeys[0]));
10057 }
10058
Thierry Strudel3d639192016-09-09 11:52:26 -070010059 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010060 available_characteristics_keys.array(),
10061 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010062
10063 /* Available stall durations depend on the HW + SW and will differ between devices */
10064 /* RAW stall durations to be added once implemented */
10065 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10066 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10067
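 /* Each stall-duration entry is a (format, width, height, stall_duration)
  * tuple, with the duration in nanoseconds. */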
10068 Vector<int64_t> available_stall_durations;
10069 for (uint32_t j = 0; j < stall_formats_count; j++) {
10070 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10071 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10072 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10073 available_stall_durations.add(stall_formats[j]);
10074 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10075 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10076 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10077 }
10078 } else {
10079 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10080 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10081 available_stall_durations.add(stall_formats[j]);
10082 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10083 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10084 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10085 }
10086 }
10087 }
10088 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10089 available_stall_durations.array(),
10090 available_stall_durations.size());
10091
10092 //QCAMERA3_OPAQUE_RAW
10093 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10094 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10095 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10096 case LEGACY_RAW:
10097 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10098 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10099 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10100 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10101 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10102 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10103 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10104 break;
10105 case MIPI_RAW:
10106 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10107 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10108 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10109 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10110 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10111 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10112 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10113 break;
10114 default:
10115 LOGE("unknown opaque_raw_format %d",
10116 gCamCapability[cameraId]->opaque_raw_fmt);
10117 break;
10118 }
10119 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10120
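 /* QCAMERA3_OPAQUE_RAW_STRIDES is a list of (width, height, stride) triplets,
  * one per supported raw dimension, using the opaque raw format chosen above. */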
10121 Vector<int32_t> strides;
10122 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10123 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10124 cam_stream_buf_plane_info_t buf_planes;
10125 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10126 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10127 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10128 &gCamCapability[cameraId]->padding_info, &buf_planes);
10129 strides.add(buf_planes.plane_info.mp[0].stride);
10130 }
10131 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10132 strides.size());
10133
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010134 //TBD: remove the following line once backend advertises zzHDR in feature mask
10135 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010136 //Video HDR default
10137 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10138 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010139 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010140 int32_t vhdr_mode[] = {
10141 QCAMERA3_VIDEO_HDR_MODE_OFF,
10142 QCAMERA3_VIDEO_HDR_MODE_ON};
10143
10144 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10145 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10146 vhdr_mode, vhdr_mode_count);
10147 }
10148
Thierry Strudel3d639192016-09-09 11:52:26 -070010149 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10150 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10151 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10152
10153 uint8_t isMonoOnly =
10154 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10155 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10156 &isMonoOnly, 1);
10157
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010158#ifndef USE_HAL_3_3
10159 Vector<int32_t> opaque_size;
10160 for (size_t j = 0; j < scalar_formats_count; j++) {
10161 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10162 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10163 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10164 cam_stream_buf_plane_info_t buf_planes;
10165
10166 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10167 &gCamCapability[cameraId]->padding_info, &buf_planes);
10168
10169 if (rc == 0) {
10170 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10171 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10172 opaque_size.add(buf_planes.plane_info.frame_len);
10173 } else {
10174 LOGE("raw frame calculation failed!");
10175 }
10176 }
10177 }
10178 }
10179
10180 if ((opaque_size.size() > 0) &&
10181 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10182 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10183 else
10184 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10185#endif
10186
Thierry Strudel04e026f2016-10-10 11:27:36 -070010187 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10188 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10189 size = 0;
10190 count = CAM_IR_MODE_MAX;
10191 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10192 for (size_t i = 0; i < count; i++) {
10193 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10194 gCamCapability[cameraId]->supported_ir_modes[i]);
10195 if (NAME_NOT_FOUND != val) {
10196 avail_ir_modes[size] = (int32_t)val;
10197 size++;
10198 }
10199 }
10200 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10201 avail_ir_modes, size);
10202 }
10203
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010204 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10205 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10206 size = 0;
10207 count = CAM_AEC_CONVERGENCE_MAX;
10208 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10209 for (size_t i = 0; i < count; i++) {
10210 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10211 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10212 if (NAME_NOT_FOUND != val) {
10213 available_instant_aec_modes[size] = (int32_t)val;
10214 size++;
10215 }
10216 }
10217 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10218 available_instant_aec_modes, size);
10219 }
10220
Thierry Strudel54dc9782017-02-15 12:12:10 -080010221 int32_t sharpness_range[] = {
10222 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10223 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10224 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10225
10226 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10227 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10228 size = 0;
10229 count = CAM_BINNING_CORRECTION_MODE_MAX;
10230 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10231 for (size_t i = 0; i < count; i++) {
10232 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10233 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10234 gCamCapability[cameraId]->supported_binning_modes[i]);
10235 if (NAME_NOT_FOUND != val) {
10236 avail_binning_modes[size] = (int32_t)val;
10237 size++;
10238 }
10239 }
10240 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10241 avail_binning_modes, size);
10242 }
10243
10244 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10245 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10246 size = 0;
10247 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10248 for (size_t i = 0; i < count; i++) {
10249 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10250 gCamCapability[cameraId]->supported_aec_modes[i]);
10251 if (NAME_NOT_FOUND != val)
10252 available_aec_modes[size++] = val;
10253 }
10254 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10255 available_aec_modes, size);
10256 }
10257
10258 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10259 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10260 size = 0;
10261 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10262 for (size_t i = 0; i < count; i++) {
10263 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10264 gCamCapability[cameraId]->supported_iso_modes[i]);
10265 if (NAME_NOT_FOUND != val)
10266 available_iso_modes[size++] = val;
10267 }
10268 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10269 available_iso_modes, size);
10270 }
10271
10272 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10273 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10274 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10275 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10276 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10277
10278 int32_t available_saturation_range[4];
10279 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10280 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10281 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10282 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10283 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10284 available_saturation_range, 4);
10285
10286 uint8_t is_hdr_values[2];
10287 is_hdr_values[0] = 0;
10288 is_hdr_values[1] = 1;
10289 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10290 is_hdr_values, 2);
10291
10292 float is_hdr_confidence_range[2];
10293 is_hdr_confidence_range[0] = 0.0;
10294 is_hdr_confidence_range[1] = 1.0;
10295 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10296 is_hdr_confidence_range, 2);
10297
Emilian Peev0a972ef2017-03-16 10:25:53 +000010298 size_t eepromLength = strnlen(
10299 reinterpret_cast<const char *>(
10300 gCamCapability[cameraId]->eeprom_version_info),
10301 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10302 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010303 char easelInfo[] = ",E:N";
10304 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10305 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10306 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010307 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10308 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010309 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010310 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10311 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10312 }
10313
Thierry Strudel3d639192016-09-09 11:52:26 -070010314 gStaticMetadata[cameraId] = staticInfo.release();
10315 return rc;
10316}
10317
10318/*===========================================================================
10319 * FUNCTION : makeTable
10320 *
10321 * DESCRIPTION: make a table of sizes
10322 *
10323 * PARAMETERS :
10324 *
10325 *
10326 *==========================================================================*/
10327void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10328 size_t max_size, int32_t *sizeTable)
10329{
10330 size_t j = 0;
10331 if (size > max_size) {
10332 size = max_size;
10333 }
10334 for (size_t i = 0; i < size; i++) {
10335 sizeTable[j] = dimTable[i].width;
10336 sizeTable[j+1] = dimTable[i].height;
10337 j+=2;
10338 }
10339}
10340
10341/*===========================================================================
10342 * FUNCTION : makeFPSTable
10343 *
10344 * DESCRIPTION: make a table of fps ranges
10345 *
10346 * PARAMETERS :
10347 *
10348 *==========================================================================*/
10349void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10350 size_t max_size, int32_t *fpsRangesTable)
10351{
10352 size_t j = 0;
10353 if (size > max_size) {
10354 size = max_size;
10355 }
10356 for (size_t i = 0; i < size; i++) {
10357 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10358 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10359 j+=2;
10360 }
10361}
10362
10363/*===========================================================================
10364 * FUNCTION : makeOverridesList
10365 *
10366 * DESCRIPTION: make a list of scene mode overrides
10367 *
10368 * PARAMETERS :
10369 *
10370 *
10371 *==========================================================================*/
10372void QCamera3HardwareInterface::makeOverridesList(
10373 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10374 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10375{
10376 /* The daemon gives a list of overrides for all scene modes.
10377 However, we should send the framework only the overrides for the scene
10378 modes it supports. */
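 /* Each output entry is an (AE mode, AWB mode, AF mode) triplet per supported
  * scene mode, matching the layout of ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */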
10379 size_t j = 0;
10380 if (size > max_size) {
10381 size = max_size;
10382 }
10383 size_t focus_count = CAM_FOCUS_MODE_MAX;
10384 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10385 focus_count);
10386 for (size_t i = 0; i < size; i++) {
10387 bool supt = false;
10388 size_t index = supported_indexes[i];
10389 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10390 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10391 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10392 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10393 overridesTable[index].awb_mode);
10394 if (NAME_NOT_FOUND != val) {
10395 overridesList[j+1] = (uint8_t)val;
10396 }
10397 uint8_t focus_override = overridesTable[index].af_mode;
10398 for (size_t k = 0; k < focus_count; k++) {
10399 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10400 supt = true;
10401 break;
10402 }
10403 }
10404 if (supt) {
10405 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10406 focus_override);
10407 if (NAME_NOT_FOUND != val) {
10408 overridesList[j+2] = (uint8_t)val;
10409 }
10410 } else {
10411 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10412 }
10413 j+=3;
10414 }
10415}
10416
10417/*===========================================================================
10418 * FUNCTION : filterJpegSizes
10419 *
10420 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10421 * are no smaller than the active array size divided by the max downscale factor
10422 *
10423 * PARAMETERS :
10424 *
10425 * RETURN : length of jpegSizes array
10426 *==========================================================================*/
10427
10428size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10429 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10430 uint8_t downscale_factor)
10431{
10432 if (0 == downscale_factor) {
10433 downscale_factor = 1;
10434 }
10435
10436 int32_t min_width = active_array_size.width / downscale_factor;
10437 int32_t min_height = active_array_size.height / downscale_factor;
10438 size_t jpegSizesCnt = 0;
10439 if (processedSizesCnt > maxCount) {
10440 processedSizesCnt = maxCount;
10441 }
10442 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10443 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10444 jpegSizes[jpegSizesCnt] = processedSizes[i];
10445 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10446 jpegSizesCnt += 2;
10447 }
10448 }
10449 return jpegSizesCnt;
10450}
10451
10452/*===========================================================================
10453 * FUNCTION : computeNoiseModelEntryS
10454 *
10455 * DESCRIPTION: function to map a given sensitivity to the S noise
10456 * model parameters in the DNG noise model.
10457 *
10458 * PARAMETERS : sens : the sensor sensitivity
10459 *
10460 * RETURN : S (sensor amplification) noise
10461 *
10462 *==========================================================================*/
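/* Note: the camera2/DNG noise profile models the noise of a normalized pixel
 * value x as N(x) = sqrt(S * x + O); this helper derives the signal-dependent
 * coefficient S as a linear function of sensitivity. */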
10463double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10464 double s = gCamCapability[mCameraId]->gradient_S * sens +
10465 gCamCapability[mCameraId]->offset_S;
10466 return ((s < 0.0) ? 0.0 : s);
10467}
10468
10469/*===========================================================================
10470 * FUNCTION : computeNoiseModelEntryO
10471 *
10472 * DESCRIPTION: function to map a given sensitivity to the O noise
10473 * model parameters in the DNG noise model.
10474 *
10475 * PARAMETERS : sens : the sensor sensitivity
10476 *
10477 * RETURN : O (sensor readout) noise
10478 *
10479 *==========================================================================*/
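/* Note: digital gain is assumed to apply only above the max analog sensitivity,
 * so it is clamped to at least 1.0 below; O then scales with the square of both
 * the sensitivity and the applied digital gain. */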
10480double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10481 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10482 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10483 1.0 : (1.0 * sens / max_analog_sens);
10484 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10485 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10486 return ((o < 0.0) ? 0.0 : o);
10487}
10488
10489/*===========================================================================
10490 * FUNCTION : getSensorSensitivity
10491 *
10492 * DESCRIPTION: convert iso_mode to an integer value
10493 *
10494 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10495 *
10496 ** RETURN : sensitivity supported by sensor
10497 *
10498 *==========================================================================*/
10499int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10500{
10501 int32_t sensitivity;
10502
10503 switch (iso_mode) {
10504 case CAM_ISO_MODE_100:
10505 sensitivity = 100;
10506 break;
10507 case CAM_ISO_MODE_200:
10508 sensitivity = 200;
10509 break;
10510 case CAM_ISO_MODE_400:
10511 sensitivity = 400;
10512 break;
10513 case CAM_ISO_MODE_800:
10514 sensitivity = 800;
10515 break;
10516 case CAM_ISO_MODE_1600:
10517 sensitivity = 1600;
10518 break;
10519 default:
10520 sensitivity = -1;
10521 break;
10522 }
10523 return sensitivity;
10524}
10525
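/*===========================================================================
 * FUNCTION : initHdrPlusClientLocked
 *
 * DESCRIPTION: Opens the Easel manager client if Easel is present on the
 * device and not disabled for HDR+ tests via the
 * camera.hdrplus.donotpoweroneasel property, then suspends Easel
 * immediately to save power and reads the HDR+ related properties.
 * Expected to be called with gHdrPlusClientLock held (see getCamInfo).
 *
 * RETURN : OK on success, error code otherwise
 *==========================================================================*/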
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010526int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010527 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010528 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10529 // to connect to Easel.
10530 bool doNotpowerOnEasel =
10531 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10532
10533 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010534 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10535 return OK;
10536 }
10537
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010538 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010539 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010540 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010541 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010542 return res;
10543 }
10544
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010545 EaselManagerClientOpened = true;
10546
10547 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010548 if (res != OK) {
10549 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10550 }
10551
10552 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010553 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010554 }
10555
10556 return OK;
10557}
10558
Thierry Strudel3d639192016-09-09 11:52:26 -070010559/*===========================================================================
10560 * FUNCTION : getCamInfo
10561 *
10562 * DESCRIPTION: query camera capabilities
10563 *
10564 * PARAMETERS :
10565 * @cameraId : camera Id
10566 * @info : camera info struct to be filled in with camera capabilities
10567 *
10568 * RETURN : int type of status
10569 * NO_ERROR -- success
10570 *              non-zero failure code
10571 *==========================================================================*/
10572int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10573 struct camera_info *info)
10574{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010575 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010576 int rc = 0;
10577
10578 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010579
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010580 {
10581 Mutex::Autolock l(gHdrPlusClientLock);
10582 rc = initHdrPlusClientLocked();
10583 if (rc != OK) {
10584 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10585 pthread_mutex_unlock(&gCamLock);
10586 return rc;
10587 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010588 }
10589
Thierry Strudel3d639192016-09-09 11:52:26 -070010590 if (NULL == gCamCapability[cameraId]) {
10591 rc = initCapabilities(cameraId);
10592 if (rc < 0) {
10593 pthread_mutex_unlock(&gCamLock);
10594 return rc;
10595 }
10596 }
10597
10598 if (NULL == gStaticMetadata[cameraId]) {
10599 rc = initStaticMetadata(cameraId);
10600 if (rc < 0) {
10601 pthread_mutex_unlock(&gCamLock);
10602 return rc;
10603 }
10604 }
10605
10606 switch(gCamCapability[cameraId]->position) {
10607 case CAM_POSITION_BACK:
10608 case CAM_POSITION_BACK_AUX:
10609 info->facing = CAMERA_FACING_BACK;
10610 break;
10611
10612 case CAM_POSITION_FRONT:
10613 case CAM_POSITION_FRONT_AUX:
10614 info->facing = CAMERA_FACING_FRONT;
10615 break;
10616
10617 default:
10618 LOGE("Unknown position type %d for camera id:%d",
10619 gCamCapability[cameraId]->position, cameraId);
10620 rc = -1;
10621 break;
10622 }
10623
10624
10625 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010626#ifndef USE_HAL_3_3
10627 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10628#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010629 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010630#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010631 info->static_camera_characteristics = gStaticMetadata[cameraId];
10632
10633 //For now assume both cameras can operate independently.
10634 info->conflicting_devices = NULL;
10635 info->conflicting_devices_length = 0;
10636
10637 //resource cost is 100 * MIN(1.0, m/M),
10638 //where m is throughput requirement with maximum stream configuration
10639 //and M is CPP maximum throughput.
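    // Illustrative example with hypothetical numbers (not a real sensor
    // configuration): a 4000x3000 active array at max_fps = 30 with
    // MAX_PROCESSED_STREAMS = 3 gives m = 3 * 4000 * 3000 * 30 = 1.08e9
    // pixels/s; if max_pixel_bandwidth were 1.2e9 pixels/s, ratio = 0.9 and
    // resource_cost = 90. The MIN(1.0, ratio) clamp keeps the reported cost
    // at or below 100.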
10640 float max_fps = 0.0;
10641 for (uint32_t i = 0;
10642 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10643 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10644 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10645 }
10646 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10647 gCamCapability[cameraId]->active_array_size.width *
10648 gCamCapability[cameraId]->active_array_size.height * max_fps /
10649 gCamCapability[cameraId]->max_pixel_bandwidth;
10650 info->resource_cost = 100 * MIN(1.0, ratio);
10651 LOGI("camera %d resource cost is %d", cameraId,
10652 info->resource_cost);
10653
10654 pthread_mutex_unlock(&gCamLock);
10655 return rc;
10656}
10657
10658/*===========================================================================
10659 * FUNCTION : translateCapabilityToMetadata
10660 *
10661 * DESCRIPTION: translate the capability into camera_metadata_t
10662 *
10663 * PARAMETERS : @type : type of the request
10664 *
10665 *
10666 * RETURN : success: camera_metadata_t*
10667 * failure: NULL
10668 *
10669 *==========================================================================*/
10670camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10671{
10672 if (mDefaultMetadata[type] != NULL) {
10673 return mDefaultMetadata[type];
10674 }
10675 //first time we are handling this request
10676 //fill up the metadata structure using the wrapper class
10677 CameraMetadata settings;
10678 //translate from cam_capability_t to camera_metadata_tag_t
10679 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10680 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10681 int32_t defaultRequestID = 0;
10682 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10683
10684 /* OIS disable */
10685 char ois_prop[PROPERTY_VALUE_MAX];
10686 memset(ois_prop, 0, sizeof(ois_prop));
10687 property_get("persist.camera.ois.disable", ois_prop, "0");
10688 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10689
10690 /* Force video to use OIS */
10691 char videoOisProp[PROPERTY_VALUE_MAX];
10692 memset(videoOisProp, 0, sizeof(videoOisProp));
10693 property_get("persist.camera.ois.video", videoOisProp, "1");
10694 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010695
10696 // Hybrid AE enable/disable
10697 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10698 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10699 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10700 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10701
Thierry Strudel3d639192016-09-09 11:52:26 -070010702 uint8_t controlIntent = 0;
10703 uint8_t focusMode;
10704 uint8_t vsMode;
10705 uint8_t optStabMode;
10706 uint8_t cacMode;
10707 uint8_t edge_mode;
10708 uint8_t noise_red_mode;
10709 uint8_t tonemap_mode;
10710 bool highQualityModeEntryAvailable = FALSE;
10711 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010712 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010713 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10714 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010715 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010716 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010717
Thierry Strudel3d639192016-09-09 11:52:26 -070010718 switch (type) {
10719 case CAMERA3_TEMPLATE_PREVIEW:
10720 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10721 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10722 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10723 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10724 edge_mode = ANDROID_EDGE_MODE_FAST;
10725 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10726 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10727 break;
10728 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10729 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10730 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10731 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10732 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10733 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10734 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10735 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10736 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10737 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10738 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10739 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10740 highQualityModeEntryAvailable = TRUE;
10741 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10742 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10743 fastModeEntryAvailable = TRUE;
10744 }
10745 }
10746 if (highQualityModeEntryAvailable) {
10747 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10748 } else if (fastModeEntryAvailable) {
10749 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10750 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010751 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10752 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10753 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010754 break;
10755 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10756 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10757 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10758 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010759 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10760 edge_mode = ANDROID_EDGE_MODE_FAST;
10761 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10762 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10763 if (forceVideoOis)
10764 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10765 break;
10766 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10767 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10768 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10769 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010770 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10771 edge_mode = ANDROID_EDGE_MODE_FAST;
10772 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10773 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10774 if (forceVideoOis)
10775 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10776 break;
10777 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10778 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10779 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10780 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10781 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10782 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10783 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10784 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10785 break;
10786 case CAMERA3_TEMPLATE_MANUAL:
10787 edge_mode = ANDROID_EDGE_MODE_FAST;
10788 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10789 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10790 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10791 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10792 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10793 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10794 break;
10795 default:
10796 edge_mode = ANDROID_EDGE_MODE_FAST;
10797 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10798 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10799 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10800 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10801 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10802 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10803 break;
10804 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010805    // Set CAC to OFF if the underlying device doesn't support it
10806 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10807 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10808 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010809 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10810 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10811 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10812 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10813 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10814 }
10815 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010816 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010817 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010818
10819 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10820 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10821 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10822 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10823 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10824 || ois_disable)
10825 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10826 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010827 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010828
10829 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10830 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10831
10832 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10833 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10834
10835 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10836 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10837
10838 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10839 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10840
10841 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10842 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10843
10844 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10845 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10846
10847 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10848 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10849
10850 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10851 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10852
10853 /*flash*/
10854 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10855 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10856
10857 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10858 settings.update(ANDROID_FLASH_FIRING_POWER,
10859 &flashFiringLevel, 1);
10860
10861 /* lens */
10862 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10863 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10864
10865 if (gCamCapability[mCameraId]->filter_densities_count) {
10866 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10867 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10868 gCamCapability[mCameraId]->filter_densities_count);
10869 }
10870
10871 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10872 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10873
Thierry Strudel3d639192016-09-09 11:52:26 -070010874 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10875 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10876
10877 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10878 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10879
10880 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10881 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10882
10883 /* face detection (default to OFF) */
10884 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10885 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10886
Thierry Strudel54dc9782017-02-15 12:12:10 -080010887 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10888 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010889
10890 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10891 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10892
10893 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10894 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10895
Thierry Strudel3d639192016-09-09 11:52:26 -070010896
10897 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10898 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10899
10900 /* Exposure time(Update the Min Exposure Time)*/
10901 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10902 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10903
10904 /* frame duration */
10905 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10906 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10907
10908 /* sensitivity */
10909 static const int32_t default_sensitivity = 100;
10910 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010911#ifndef USE_HAL_3_3
10912 static const int32_t default_isp_sensitivity =
10913 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10914 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10915#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010916
10917 /*edge mode*/
10918 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10919
10920 /*noise reduction mode*/
10921 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10922
10923 /*color correction mode*/
10924 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10925 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10926
10927    /*tonemap mode*/
10928 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10929
10930 int32_t scaler_crop_region[4];
10931 scaler_crop_region[0] = 0;
10932 scaler_crop_region[1] = 0;
10933 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10934 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10935 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10936
10937 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10938 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10939
10940 /*focus distance*/
10941 float focus_distance = 0.0;
10942 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10943
10944 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010945 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010946 float max_range = 0.0;
10947 float max_fixed_fps = 0.0;
10948 int32_t fps_range[2] = {0, 0};
10949 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10950 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010951 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10952 TEMPLATE_MAX_PREVIEW_FPS) {
10953 continue;
10954 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010955 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10956 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10957 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10958 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10959 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10960 if (range > max_range) {
10961 fps_range[0] =
10962 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10963 fps_range[1] =
10964 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10965 max_range = range;
10966 }
10967 } else {
10968 if (range < 0.01 && max_fixed_fps <
10969 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10970 fps_range[0] =
10971 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10972 fps_range[1] =
10973 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10974 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10975 }
10976 }
10977 }
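    // Illustrative example with a hypothetical FPS table: given the ranges
    // [15, 30], [30, 30] and [7.5, 30], the preview/still/ZSL templates pick
    // [7.5, 30] (the widest span), while the video templates pick [30, 30]
    // (the highest fixed-fps entry, i.e. range < 0.01). Entries whose
    // max_fps exceeds TEMPLATE_MAX_PREVIEW_FPS are skipped for all templates.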
10978 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10979
10980 /*precapture trigger*/
10981 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10982 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10983
10984 /*af trigger*/
10985 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10986 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10987
10988 /* ae & af regions */
10989 int32_t active_region[] = {
10990 gCamCapability[mCameraId]->active_array_size.left,
10991 gCamCapability[mCameraId]->active_array_size.top,
10992 gCamCapability[mCameraId]->active_array_size.left +
10993 gCamCapability[mCameraId]->active_array_size.width,
10994 gCamCapability[mCameraId]->active_array_size.top +
10995 gCamCapability[mCameraId]->active_array_size.height,
10996 0};
10997 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10998 sizeof(active_region) / sizeof(active_region[0]));
10999 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11000 sizeof(active_region) / sizeof(active_region[0]));
11001
11002 /* black level lock */
11003 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11004 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11005
Thierry Strudel3d639192016-09-09 11:52:26 -070011006 //special defaults for manual template
11007 if (type == CAMERA3_TEMPLATE_MANUAL) {
11008 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11009 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11010
11011 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11012 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11013
11014 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11015 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11016
11017 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11018 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11019
11020 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11021 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11022
11023 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11024 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11025 }
11026
11027
11028    /* TNR
11029     * This is where we decide for which templates TNR is enabled.
11030     * TNR is turned on if either the preview or the video stream requires it.
11031     * This is not to be confused with per-stream linking; that decision
11032     * is still made per session and is handled as part of stream configuration.
11033     */
11034 uint8_t tnr_enable = 0;
11035
11036 if (m_bTnrPreview || m_bTnrVideo) {
11037
11038 switch (type) {
11039 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11040 tnr_enable = 1;
11041 break;
11042
11043 default:
11044 tnr_enable = 0;
11045 break;
11046 }
11047
11048 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11049 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11050 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11051
11052 LOGD("TNR:%d with process plate %d for template:%d",
11053 tnr_enable, tnr_process_type, type);
11054 }
11055
11056 //Update Link tags to default
11057 int32_t sync_type = CAM_TYPE_STANDALONE;
11058 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11059
11060 int32_t is_main = 0; //this doesn't matter as app should overwrite
11061 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11062
11063 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11064
11065 /* CDS default */
11066 char prop[PROPERTY_VALUE_MAX];
11067 memset(prop, 0, sizeof(prop));
11068 property_get("persist.camera.CDS", prop, "Auto");
11069 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11070 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11071 if (CAM_CDS_MODE_MAX == cds_mode) {
11072 cds_mode = CAM_CDS_MODE_AUTO;
11073 }
11074
11075 /* Disabling CDS in templates which have TNR enabled*/
11076 if (tnr_enable)
11077 cds_mode = CAM_CDS_MODE_OFF;
11078
11079 int32_t mode = cds_mode;
11080 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011081
Thierry Strudel269c81a2016-10-12 12:13:59 -070011082 /* Manual Convergence AEC Speed is disabled by default*/
11083 float default_aec_speed = 0;
11084 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11085
11086 /* Manual Convergence AWB Speed is disabled by default*/
11087 float default_awb_speed = 0;
11088 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11089
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011090 // Set instant AEC to normal convergence by default
11091 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11092 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11093
Shuzhen Wang19463d72016-03-08 11:09:52 -080011094 /* hybrid ae */
11095 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11096
Thierry Strudel3d639192016-09-09 11:52:26 -070011097 mDefaultMetadata[type] = settings.release();
11098
11099 return mDefaultMetadata[type];
11100}
11101
11102/*===========================================================================
11103 * FUNCTION : setFrameParameters
11104 *
11105 * DESCRIPTION: set parameters per frame as requested in the metadata from
11106 * framework
11107 *
11108 * PARAMETERS :
11109 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011110 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011111 * @blob_request: Whether this request is a blob request or not
11112 *
11113 * RETURN : success: NO_ERROR
11114 * failure:
11115 *==========================================================================*/
11116int QCamera3HardwareInterface::setFrameParameters(
11117 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011118 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011119 int blob_request,
11120 uint32_t snapshotStreamId)
11121{
11122 /*translate from camera_metadata_t type to parm_type_t*/
11123 int rc = 0;
11124 int32_t hal_version = CAM_HAL_V3;
11125
11126 clear_metadata_buffer(mParameters);
11127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11128 LOGE("Failed to set hal version in the parameters");
11129 return BAD_VALUE;
11130 }
11131
11132 /*we need to update the frame number in the parameters*/
11133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11134 request->frame_number)) {
11135 LOGE("Failed to set the frame number in the parameters");
11136 return BAD_VALUE;
11137 }
11138
11139 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011140 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011141 LOGE("Failed to set stream type mask in the parameters");
11142 return BAD_VALUE;
11143 }
11144
11145 if (mUpdateDebugLevel) {
11146        /* The value of dummyDebugLevel is irrelevant. Receiving
11147         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a read of the debug property */
11148 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11150 dummyDebugLevel)) {
11151 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11152 return BAD_VALUE;
11153 }
11154 mUpdateDebugLevel = false;
11155 }
11156
11157 if(request->settings != NULL){
11158 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11159 if (blob_request)
11160 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11161 }
11162
11163 return rc;
11164}
11165
11166/*===========================================================================
11167 * FUNCTION : setReprocParameters
11168 *
11169 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11170 * return it.
11171 *
11172 * PARAMETERS :
11173 * @request : request that needs to be serviced
11174 *
11175 * RETURN : success: NO_ERROR
11176 * failure:
11177 *==========================================================================*/
11178int32_t QCamera3HardwareInterface::setReprocParameters(
11179 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11180 uint32_t snapshotStreamId)
11181{
11182 /*translate from camera_metadata_t type to parm_type_t*/
11183 int rc = 0;
11184
11185 if (NULL == request->settings){
11186 LOGE("Reprocess settings cannot be NULL");
11187 return BAD_VALUE;
11188 }
11189
11190 if (NULL == reprocParam) {
11191 LOGE("Invalid reprocessing metadata buffer");
11192 return BAD_VALUE;
11193 }
11194 clear_metadata_buffer(reprocParam);
11195
11196 /*we need to update the frame number in the parameters*/
11197 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11198 request->frame_number)) {
11199 LOGE("Failed to set the frame number in the parameters");
11200 return BAD_VALUE;
11201 }
11202
11203 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11204 if (rc < 0) {
11205 LOGE("Failed to translate reproc request");
11206 return rc;
11207 }
11208
11209 CameraMetadata frame_settings;
11210 frame_settings = request->settings;
11211 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11212 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11213 int32_t *crop_count =
11214 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11215 int32_t *crop_data =
11216 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11217 int32_t *roi_map =
11218 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11219 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11220 cam_crop_data_t crop_meta;
11221 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11222 crop_meta.num_of_streams = 1;
11223 crop_meta.crop_info[0].crop.left = crop_data[0];
11224 crop_meta.crop_info[0].crop.top = crop_data[1];
11225 crop_meta.crop_info[0].crop.width = crop_data[2];
11226 crop_meta.crop_info[0].crop.height = crop_data[3];
11227
11228 crop_meta.crop_info[0].roi_map.left =
11229 roi_map[0];
11230 crop_meta.crop_info[0].roi_map.top =
11231 roi_map[1];
11232 crop_meta.crop_info[0].roi_map.width =
11233 roi_map[2];
11234 crop_meta.crop_info[0].roi_map.height =
11235 roi_map[3];
11236
11237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11238 rc = BAD_VALUE;
11239 }
11240 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11241 request->input_buffer->stream,
11242 crop_meta.crop_info[0].crop.left,
11243 crop_meta.crop_info[0].crop.top,
11244 crop_meta.crop_info[0].crop.width,
11245 crop_meta.crop_info[0].crop.height);
11246 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11247 request->input_buffer->stream,
11248 crop_meta.crop_info[0].roi_map.left,
11249 crop_meta.crop_info[0].roi_map.top,
11250 crop_meta.crop_info[0].roi_map.width,
11251 crop_meta.crop_info[0].roi_map.height);
11252 } else {
11253 LOGE("Invalid reprocess crop count %d!", *crop_count);
11254 }
11255 } else {
11256 LOGE("No crop data from matching output stream");
11257 }
11258
11259 /* These settings are not needed for regular requests so handle them specially for
11260 reprocess requests; information needed for EXIF tags */
11261 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11262 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11263 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11264 if (NAME_NOT_FOUND != val) {
11265 uint32_t flashMode = (uint32_t)val;
11266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11267 rc = BAD_VALUE;
11268 }
11269 } else {
11270 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11271 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11272 }
11273 } else {
11274 LOGH("No flash mode in reprocess settings");
11275 }
11276
11277 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11278 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11280 rc = BAD_VALUE;
11281 }
11282 } else {
11283 LOGH("No flash state in reprocess settings");
11284 }
11285
11286 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11287 uint8_t *reprocessFlags =
11288 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11290 *reprocessFlags)) {
11291 rc = BAD_VALUE;
11292 }
11293 }
11294
Thierry Strudel54dc9782017-02-15 12:12:10 -080011295 // Add exif debug data to internal metadata
11296 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11297 mm_jpeg_debug_exif_params_t *debug_params =
11298 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11299 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11300 // AE
11301 if (debug_params->ae_debug_params_valid == TRUE) {
11302 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11303 debug_params->ae_debug_params);
11304 }
11305 // AWB
11306 if (debug_params->awb_debug_params_valid == TRUE) {
11307 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11308 debug_params->awb_debug_params);
11309 }
11310 // AF
11311 if (debug_params->af_debug_params_valid == TRUE) {
11312 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11313 debug_params->af_debug_params);
11314 }
11315 // ASD
11316 if (debug_params->asd_debug_params_valid == TRUE) {
11317 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11318 debug_params->asd_debug_params);
11319 }
11320 // Stats
11321 if (debug_params->stats_debug_params_valid == TRUE) {
11322 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11323 debug_params->stats_debug_params);
11324 }
11325 // BE Stats
11326 if (debug_params->bestats_debug_params_valid == TRUE) {
11327 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11328 debug_params->bestats_debug_params);
11329 }
11330 // BHIST
11331 if (debug_params->bhist_debug_params_valid == TRUE) {
11332 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11333 debug_params->bhist_debug_params);
11334 }
11335 // 3A Tuning
11336 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11337 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11338 debug_params->q3a_tuning_debug_params);
11339 }
11340 }
11341
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011342 // Add metadata which reprocess needs
11343 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11344 cam_reprocess_info_t *repro_info =
11345 (cam_reprocess_info_t *)frame_settings.find
11346 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011347 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011348 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011349 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011350 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011352 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011353 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011354 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011355 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011356 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011357 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011358 repro_info->pipeline_flip);
11359 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11360 repro_info->af_roi);
11361 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11362 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011363        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11364            CAM_INTF_PARM_ROTATION metadata has already been added in
11365            translateToHalMetadata and the HAL needs to keep this new rotation
11366            metadata. Otherwise, the old rotation info saved in the vendor tag
11367            is used */
11368 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11369 CAM_INTF_PARM_ROTATION, reprocParam) {
11370 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11371 } else {
11372 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011373 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011374 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011375 }
11376
11377    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11378       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11379       roi.width and roi.height give the final JPEG size.
11380       For now, the HAL only checks this for reprocess requests */
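    /* Illustrative example with hypothetical values: for a 4000x3000 input,
       QCAMERA3_JPEG_ENCODE_CROP_RECT = {1000, 750, 2000, 1500} selects a
       centered 2000x1500 region, and a QCAMERA3_JPEG_ENCODE_CROP_ROI of
       {0, 0, 1600, 1200} would have the HW JPEG encoder scale that crop to a
       1600x1200 JPEG. The roi_map below is matched to the CPP output size
       when that metadata is available. */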
11381 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11382 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11383 uint8_t *enable =
11384 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11385 if (*enable == TRUE) {
11386 int32_t *crop_data =
11387 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11388 cam_stream_crop_info_t crop_meta;
11389 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11390 crop_meta.stream_id = 0;
11391 crop_meta.crop.left = crop_data[0];
11392 crop_meta.crop.top = crop_data[1];
11393 crop_meta.crop.width = crop_data[2];
11394 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011395 // The JPEG crop roi should match cpp output size
11396 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11397 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11398 crop_meta.roi_map.left = 0;
11399 crop_meta.roi_map.top = 0;
11400 crop_meta.roi_map.width = cpp_crop->crop.width;
11401 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011402 }
11403 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11404 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011405 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011406 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011407 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11408 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011409 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011410 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11411
11412 // Add JPEG scale information
11413 cam_dimension_t scale_dim;
11414 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11415 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11416 int32_t *roi =
11417 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11418 scale_dim.width = roi[2];
11419 scale_dim.height = roi[3];
11420 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11421 scale_dim);
11422 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11423 scale_dim.width, scale_dim.height, mCameraId);
11424 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011425 }
11426 }
11427
11428 return rc;
11429}
11430
11431/*===========================================================================
11432 * FUNCTION : saveRequestSettings
11433 *
11434 * DESCRIPTION: Add any settings that might have changed to the request settings
11435 * and save the settings to be applied on the frame
11436 *
11437 * PARAMETERS :
11438 * @jpegMetadata : the extracted and/or modified jpeg metadata
11439 * @request : request with initial settings
11440 *
11441 * RETURN :
11442 * camera_metadata_t* : pointer to the saved request settings
11443 *==========================================================================*/
11444camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11445 const CameraMetadata &jpegMetadata,
11446 camera3_capture_request_t *request)
11447{
11448 camera_metadata_t *resultMetadata;
11449 CameraMetadata camMetadata;
11450 camMetadata = request->settings;
11451
11452 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11453 int32_t thumbnail_size[2];
11454 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11455 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11456 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11457 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11458 }
11459
11460 if (request->input_buffer != NULL) {
11461 uint8_t reprocessFlags = 1;
11462 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11463 (uint8_t*)&reprocessFlags,
11464 sizeof(reprocessFlags));
11465 }
11466
11467 resultMetadata = camMetadata.release();
11468 return resultMetadata;
11469}
11470
11471/*===========================================================================
11472 * FUNCTION : setHalFpsRange
11473 *
11474 * DESCRIPTION: set FPS range parameter
11475 *
11476 *
11477 * PARAMETERS :
11478 * @settings : Metadata from framework
11479 * @hal_metadata: Metadata buffer
11480 *
11481 *
11482 * RETURN : success: NO_ERROR
11483 * failure:
11484 *==========================================================================*/
11485int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11486 metadata_buffer_t *hal_metadata)
11487{
11488 int32_t rc = NO_ERROR;
11489 cam_fps_range_t fps_range;
11490 fps_range.min_fps = (float)
11491 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11492 fps_range.max_fps = (float)
11493 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11494 fps_range.video_min_fps = fps_range.min_fps;
11495 fps_range.video_max_fps = fps_range.max_fps;
11496
11497 LOGD("aeTargetFpsRange fps: [%f %f]",
11498 fps_range.min_fps, fps_range.max_fps);
11499 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11500 * follows:
11501 * ---------------------------------------------------------------|
11502 * Video stream is absent in configure_streams |
11503      * (Camcorder preview before the first video record)         |
11504 * ---------------------------------------------------------------|
11505 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11506 * | | | vid_min/max_fps|
11507 * ---------------------------------------------------------------|
11508 * NO | [ 30, 240] | 240 | [240, 240] |
11509 * |-------------|-------------|----------------|
11510 * | [240, 240] | 240 | [240, 240] |
11511 * ---------------------------------------------------------------|
11512 * Video stream is present in configure_streams |
11513 * ---------------------------------------------------------------|
11514 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11515 * | | | vid_min/max_fps|
11516 * ---------------------------------------------------------------|
11517 * NO | [ 30, 240] | 240 | [240, 240] |
11518 * (camcorder prev |-------------|-------------|----------------|
11519 * after video rec | [240, 240] | 240 | [240, 240] |
11520 * is stopped) | | | |
11521 * ---------------------------------------------------------------|
11522 * YES | [ 30, 240] | 240 | [240, 240] |
11523 * |-------------|-------------|----------------|
11524 * | [240, 240] | 240 | [240, 240] |
11525 * ---------------------------------------------------------------|
11526 * When Video stream is absent in configure_streams,
11527 * preview fps = sensor_fps / batchsize
11528 * Eg: for 240fps at batchSize 4, preview = 60fps
11529 * for 120fps at batchSize 4, preview = 30fps
11530 *
11531 * When video stream is present in configure_streams, preview fps is as per
11532      * the ratio of preview buffers to video buffers requested in the
11533      * process_capture_request calls
11534 */
11535 mBatchSize = 0;
11536 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11537 fps_range.min_fps = fps_range.video_max_fps;
11538 fps_range.video_min_fps = fps_range.video_max_fps;
11539 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11540 fps_range.max_fps);
11541 if (NAME_NOT_FOUND != val) {
11542 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11544 return BAD_VALUE;
11545 }
11546
11547 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11548 /* If batchmode is currently in progress and the fps changes,
11549 * set the flag to restart the sensor */
11550 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11551 (mHFRVideoFps != fps_range.max_fps)) {
11552 mNeedSensorRestart = true;
11553 }
11554 mHFRVideoFps = fps_range.max_fps;
11555 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11556 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11557 mBatchSize = MAX_HFR_BATCH_SIZE;
11558 }
11559 }
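            // Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30fps):
            // a 240fps HFR request gives mBatchSize = 240 / 30 = 8, which is
            // then capped at MAX_HFR_BATCH_SIZE.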
11560 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11561
11562 }
11563 } else {
11564 /* HFR mode is session param in backend/ISP. This should be reset when
11565 * in non-HFR mode */
11566 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11568 return BAD_VALUE;
11569 }
11570 }
11571 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11572 return BAD_VALUE;
11573 }
11574 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11575 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11576 return rc;
11577}
11578
11579/*===========================================================================
11580 * FUNCTION : translateToHalMetadata
11581 *
11582 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11583 *
11584 *
11585 * PARAMETERS :
11586 * @request : request sent from framework
11587 *
11588 *
11589 * RETURN : success: NO_ERROR
11590 * failure:
11591 *==========================================================================*/
11592int QCamera3HardwareInterface::translateToHalMetadata
11593 (const camera3_capture_request_t *request,
11594 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011595 uint32_t snapshotStreamId) {
11596 if (request == nullptr || hal_metadata == nullptr) {
11597 return BAD_VALUE;
11598 }
11599
11600 int64_t minFrameDuration = getMinFrameDuration(request);
11601
11602 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11603 minFrameDuration);
11604}
11605
11606int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11607 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11608 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11609
Thierry Strudel3d639192016-09-09 11:52:26 -070011610 int rc = 0;
11611 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011612 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011613
11614 /* Do not change the order of the following list unless you know what you are
11615 * doing.
11616 * The order is laid out in such a way that parameters in the front of the table
11617 * may be used to override the parameters later in the table. Examples are:
11618 * 1. META_MODE should precede AEC/AWB/AF MODE
11619     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11620 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11621     * 4. Any mode should precede its corresponding settings
11622 */
11623 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11624 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11626 rc = BAD_VALUE;
11627 }
11628 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11629 if (rc != NO_ERROR) {
11630 LOGE("extractSceneMode failed");
11631 }
11632 }
11633
11634 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11635 uint8_t fwk_aeMode =
11636 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11637 uint8_t aeMode;
11638 int32_t redeye;
11639
11640 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11641 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011642 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11643 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011644 } else {
11645 aeMode = CAM_AE_MODE_ON;
11646 }
11647 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11648 redeye = 1;
11649 } else {
11650 redeye = 0;
11651 }
11652
11653 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11654 fwk_aeMode);
11655 if (NAME_NOT_FOUND != val) {
11656 int32_t flashMode = (int32_t)val;
11657 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11658 }
11659
11660 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11662 rc = BAD_VALUE;
11663 }
11664 }
11665
11666 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11667 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11668 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11669 fwk_whiteLevel);
11670 if (NAME_NOT_FOUND != val) {
11671 uint8_t whiteLevel = (uint8_t)val;
11672 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11673 rc = BAD_VALUE;
11674 }
11675 }
11676 }
11677
11678 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11679 uint8_t fwk_cacMode =
11680 frame_settings.find(
11681 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11682 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11683 fwk_cacMode);
11684 if (NAME_NOT_FOUND != val) {
11685 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11686 bool entryAvailable = FALSE;
11687 // Check whether Frameworks set CAC mode is supported in device or not
11688 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11689 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11690 entryAvailable = TRUE;
11691 break;
11692 }
11693 }
11694 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11695            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11696 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11697 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11698 if (entryAvailable == FALSE) {
11699 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11700 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11701 } else {
11702 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11703 // High is not supported and so set the FAST as spec say's underlying
11704                    // High is not supported, so set FAST, as the spec says the underlying
11705                    // device implementation can be the same for both modes.
11706 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11707 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11708 // in order to avoid the fps drop due to high quality
11709 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11710 } else {
11711 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11712 }
11713 }
11714 }
11715 LOGD("Final cacMode is %d", cacMode);
11716 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11717 rc = BAD_VALUE;
11718 }
11719 } else {
11720 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11721 }
11722 }
11723
Thierry Strudel2896d122017-02-23 19:18:03 -080011724 char af_value[PROPERTY_VALUE_MAX];
11725 property_get("persist.camera.af.infinity", af_value, "0");
11726
Jason Lee84ae9972017-02-24 13:24:24 -080011727 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011728 if (atoi(af_value) == 0) {
11729 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011730 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011731 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11732 fwk_focusMode);
11733 if (NAME_NOT_FOUND != val) {
11734 uint8_t focusMode = (uint8_t)val;
11735 LOGD("set focus mode %d", focusMode);
11736 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11737 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11738 rc = BAD_VALUE;
11739 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011740 }
11741 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011742 } else {
11743 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11744 LOGE("Focus forced to infinity %d", focusMode);
11745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11746 rc = BAD_VALUE;
11747 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011748 }
11749
Jason Lee84ae9972017-02-24 13:24:24 -080011750 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11751 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011752 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11754 focalDistance)) {
11755 rc = BAD_VALUE;
11756 }
11757 }
11758
11759 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11760 uint8_t fwk_antibandingMode =
11761 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11762 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11763 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11764 if (NAME_NOT_FOUND != val) {
11765 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011766 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11767 if (m60HzZone) {
11768 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11769 } else {
11770 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11771 }
11772 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11774 hal_antibandingMode)) {
11775 rc = BAD_VALUE;
11776 }
11777 }
11778 }
11779
11780 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11781 int32_t expCompensation = frame_settings.find(
11782 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11783 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11784 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11785 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11786 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011787 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11789 expCompensation)) {
11790 rc = BAD_VALUE;
11791 }
11792 }
11793
11794 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11795 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11797 rc = BAD_VALUE;
11798 }
11799 }
11800 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11801 rc = setHalFpsRange(frame_settings, hal_metadata);
11802 if (rc != NO_ERROR) {
11803 LOGE("setHalFpsRange failed");
11804 }
11805 }
11806
11807 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11808 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11810 rc = BAD_VALUE;
11811 }
11812 }
11813
11814 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11815 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11816 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11817 fwk_effectMode);
11818 if (NAME_NOT_FOUND != val) {
11819 uint8_t effectMode = (uint8_t)val;
11820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11821 rc = BAD_VALUE;
11822 }
11823 }
11824 }
11825
11826 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11827 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11829 colorCorrectMode)) {
11830 rc = BAD_VALUE;
11831 }
11832 }
11833
11834 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11835 cam_color_correct_gains_t colorCorrectGains;
11836 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11837 colorCorrectGains.gains[i] =
11838 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11839 }
11840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11841 colorCorrectGains)) {
11842 rc = BAD_VALUE;
11843 }
11844 }
11845
11846 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11847 cam_color_correct_matrix_t colorCorrectTransform;
11848 cam_rational_type_t transform_elem;
11849 size_t num = 0;
11850 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11851 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11852 transform_elem.numerator =
11853 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11854 transform_elem.denominator =
11855 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11856 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11857 num++;
11858 }
11859 }
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11861 colorCorrectTransform)) {
11862 rc = BAD_VALUE;
11863 }
11864 }
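    // The color correction transform arrives as CC_MATRIX_ROWS x CC_MATRIX_COLS
    // rationals in row-major order, so transform_matrix[i][j] is taken from
    // data.r[i * CC_MATRIX_COLS + j]; for the usual 3x3 matrix, data.r[4] is the
    // center element transform_matrix[1][1].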
11865
11866 cam_trigger_t aecTrigger;
11867 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11868 aecTrigger.trigger_id = -1;
11869 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11870 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11871 aecTrigger.trigger =
11872 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11873 aecTrigger.trigger_id =
11874 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11876 aecTrigger)) {
11877 rc = BAD_VALUE;
11878 }
11879 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11880 aecTrigger.trigger, aecTrigger.trigger_id);
11881 }
11882
11883 /*af_trigger must come with a trigger id*/
11884 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11885 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11886 cam_trigger_t af_trigger;
11887 af_trigger.trigger =
11888 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11889 af_trigger.trigger_id =
11890 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11892 rc = BAD_VALUE;
11893 }
11894 LOGD("AfTrigger: %d AfTriggerID: %d",
11895 af_trigger.trigger, af_trigger.trigger_id);
11896 }
11897
11898 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11899 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11900 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11901 rc = BAD_VALUE;
11902 }
11903 }
11904 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11905 cam_edge_application_t edge_application;
11906 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011907
Thierry Strudel3d639192016-09-09 11:52:26 -070011908 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11909 edge_application.sharpness = 0;
11910 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011911 edge_application.sharpness =
11912 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11913 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11914 int32_t sharpness =
11915 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11916 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11917 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11918 LOGD("Setting edge mode sharpness %d", sharpness);
11919 edge_application.sharpness = sharpness;
11920 }
11921 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011922 }
11923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11924 rc = BAD_VALUE;
11925 }
11926 }
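    // Sharpness policy: edge mode OFF forces sharpness to 0; otherwise the
    // capability default is used, and the QCAMERA3_SHARPNESS_STRENGTH vendor tag
    // overrides it only when the requested value lies inside the advertised
    // [min_value, max_value] range (out-of-range values are silently ignored).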
11927
11928 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11929 int32_t respectFlashMode = 1;
11930 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11931 uint8_t fwk_aeMode =
11932 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011933 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
11934 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
11935 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011936 respectFlashMode = 0;
11937 LOGH("AE Mode controls flash, ignore android.flash.mode");
11938 }
11939 }
11940 if (respectFlashMode) {
11941 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11942 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11943 LOGH("flash mode after mapping %d", val);
11944 // To check: CAM_INTF_META_FLASH_MODE usage
11945 if (NAME_NOT_FOUND != val) {
11946 uint8_t flashMode = (uint8_t)val;
11947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11948 rc = BAD_VALUE;
11949 }
11950 }
11951 }
11952 }
11953
11954 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11955 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11956 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11957 rc = BAD_VALUE;
11958 }
11959 }
11960
11961 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11962 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11964 flashFiringTime)) {
11965 rc = BAD_VALUE;
11966 }
11967 }
11968
11969 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11970 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11972 hotPixelMode)) {
11973 rc = BAD_VALUE;
11974 }
11975 }
11976
11977 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11978 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11980 lensAperture)) {
11981 rc = BAD_VALUE;
11982 }
11983 }
11984
11985 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11986 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11988 filterDensity)) {
11989 rc = BAD_VALUE;
11990 }
11991 }
11992
11993 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11994 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11995 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11996 focalLength)) {
11997 rc = BAD_VALUE;
11998 }
11999 }
12000
12001 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12002 uint8_t optStabMode =
12003 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12005 optStabMode)) {
12006 rc = BAD_VALUE;
12007 }
12008 }
12009
12010 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12011 uint8_t videoStabMode =
12012 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12013 LOGD("videoStabMode from APP = %d", videoStabMode);
12014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12015 videoStabMode)) {
12016 rc = BAD_VALUE;
12017 }
12018 }
12019
12020
12021 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12022 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12024 noiseRedMode)) {
12025 rc = BAD_VALUE;
12026 }
12027 }
12028
12029 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12030 float reprocessEffectiveExposureFactor =
12031 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12033 reprocessEffectiveExposureFactor)) {
12034 rc = BAD_VALUE;
12035 }
12036 }
12037
12038 cam_crop_region_t scalerCropRegion;
12039 bool scalerCropSet = false;
12040 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12041 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12042 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12043 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12044 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12045
12046 // Map coordinate system from active array to sensor output.
12047 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12048 scalerCropRegion.width, scalerCropRegion.height);
12049
12050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12051 scalerCropRegion)) {
12052 rc = BAD_VALUE;
12053 }
12054 scalerCropSet = true;
12055 }
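    // The crop rectangle is delivered in active-array coordinates and remapped to
    // sensor-output coordinates by toSensor() above; scalerCropSet lets the AE/AF
    // region handling further below clip its ROIs against this crop through
    // resetIfNeededROI().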
12056
12057 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12058 int64_t sensorExpTime =
12059 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12060 LOGD("setting sensorExpTime %lld", sensorExpTime);
12061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12062 sensorExpTime)) {
12063 rc = BAD_VALUE;
12064 }
12065 }
12066
12067 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12068 int64_t sensorFrameDuration =
12069 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012070 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12071 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12072 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12073 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12075 sensorFrameDuration)) {
12076 rc = BAD_VALUE;
12077 }
12078 }
12079
12080 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12081 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12082 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12083 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12084 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12085 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12086 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12088 sensorSensitivity)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012093#ifndef USE_HAL_3_3
12094 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12095 int32_t ispSensitivity =
12096 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12097 if (ispSensitivity <
12098 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12099 ispSensitivity =
12100 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12101 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12102 }
12103 if (ispSensitivity >
12104 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12105 ispSensitivity =
12106 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12107 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12108 }
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12110 ispSensitivity)) {
12111 rc = BAD_VALUE;
12112 }
12113 }
12114#endif
12115
Thierry Strudel3d639192016-09-09 11:52:26 -070012116 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12117 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12119 rc = BAD_VALUE;
12120 }
12121 }
12122
12123 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12124 uint8_t fwk_facedetectMode =
12125 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12126
12127 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12128 fwk_facedetectMode);
12129
12130 if (NAME_NOT_FOUND != val) {
12131 uint8_t facedetectMode = (uint8_t)val;
12132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12133 facedetectMode)) {
12134 rc = BAD_VALUE;
12135 }
12136 }
12137 }
12138
Thierry Strudel54dc9782017-02-15 12:12:10 -080012139 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012140 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012141 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12143 histogramMode)) {
12144 rc = BAD_VALUE;
12145 }
12146 }
12147
12148 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12149 uint8_t sharpnessMapMode =
12150 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12151 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12152 sharpnessMapMode)) {
12153 rc = BAD_VALUE;
12154 }
12155 }
12156
12157 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12158 uint8_t tonemapMode =
12159 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12161 rc = BAD_VALUE;
12162 }
12163 }
12164 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12165 /*All tonemap channels will have the same number of points*/
12166 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12167 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12168 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12169 cam_rgb_tonemap_curves tonemapCurves;
12170 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12171 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12172 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12173 tonemapCurves.tonemap_points_cnt,
12174 CAM_MAX_TONEMAP_CURVE_SIZE);
12175 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12176 }
12177
12178 /* ch0 = G*/
12179 size_t point = 0;
12180 cam_tonemap_curve_t tonemapCurveGreen;
12181 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12182 for (size_t j = 0; j < 2; j++) {
12183 tonemapCurveGreen.tonemap_points[i][j] =
12184 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12185 point++;
12186 }
12187 }
12188 tonemapCurves.curves[0] = tonemapCurveGreen;
12189
12190 /* ch 1 = B */
12191 point = 0;
12192 cam_tonemap_curve_t tonemapCurveBlue;
12193 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12194 for (size_t j = 0; j < 2; j++) {
12195 tonemapCurveBlue.tonemap_points[i][j] =
12196 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12197 point++;
12198 }
12199 }
12200 tonemapCurves.curves[1] = tonemapCurveBlue;
12201
12202 /* ch 2 = R */
12203 point = 0;
12204 cam_tonemap_curve_t tonemapCurveRed;
12205 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12206 for (size_t j = 0; j < 2; j++) {
12207 tonemapCurveRed.tonemap_points[i][j] =
12208 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12209 point++;
12210 }
12211 }
12212 tonemapCurves.curves[2] = tonemapCurveRed;
12213
12214 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12215 tonemapCurves)) {
12216 rc = BAD_VALUE;
12217 }
12218 }
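    // Each ANDROID_TONEMAP_CURVE_* array is a flat list of (Pin, Pout) float
    // pairs, hence tonemap_points_cnt = count / 2; for example, a two-point
    // identity curve for one channel would arrive as {0.0f, 0.0f, 1.0f, 1.0f}.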
12219
12220 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12221 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12223 captureIntent)) {
12224 rc = BAD_VALUE;
12225 }
12226 }
12227
12228 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12229 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12231 blackLevelLock)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12237 uint8_t lensShadingMapMode =
12238 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12240 lensShadingMapMode)) {
12241 rc = BAD_VALUE;
12242 }
12243 }
12244
12245 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12246 cam_area_t roi;
12247 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012248 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012249
12250 // Map coordinate system from active array to sensor output.
12251 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12252 roi.rect.height);
12253
12254 if (scalerCropSet) {
12255 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12256 }
12257 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12258 rc = BAD_VALUE;
12259 }
12260 }
12261
12262 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12263 cam_area_t roi;
12264 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012265 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012266
12267 // Map coordinate system from active array to sensor output.
12268 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12269 roi.rect.height);
12270
12271 if (scalerCropSet) {
12272 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12273 }
12274 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 // CDS for non-HFR non-video mode
12280 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12281 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12282 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12283 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12284 LOGE("Invalid CDS mode %d!", *fwk_cds);
12285 } else {
12286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12287 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12288 rc = BAD_VALUE;
12289 }
12290 }
12291 }
12292
Thierry Strudel04e026f2016-10-10 11:27:36 -070012293 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012294 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012295 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012296 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12297 }
12298 if (m_bVideoHdrEnabled)
12299 vhdr = CAM_VIDEO_HDR_MODE_ON;
12300
Thierry Strudel54dc9782017-02-15 12:12:10 -080012301 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12302
12303 if(vhdr != curr_hdr_state)
12304 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12305
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012306 rc = setVideoHdrMode(mParameters, vhdr);
12307 if (rc != NO_ERROR) {
12308 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012309 }
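    // Note: once m_bVideoHdrEnabled is set (elsewhere, e.g. at stream
    // configuration time), it overrides the per-request QCAMERA3_VIDEO_HDR_MODE
    // value above, so video HDR remains on for subsequent requests.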
12310
12311 //IR
12312 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12313 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12314 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012315 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12316 uint8_t isIRon = 0;
12317
12318 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012319 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12320 LOGE("Invalid IR mode %d!", fwk_ir);
12321 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012322 if(isIRon != curr_ir_state )
12323 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12324
Thierry Strudel04e026f2016-10-10 11:27:36 -070012325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12326 CAM_INTF_META_IR_MODE, fwk_ir)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330 }
12331
Thierry Strudel54dc9782017-02-15 12:12:10 -080012332 //Binning Correction Mode
12333 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12334 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12335 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12336 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12337 || (0 > fwk_binning_correction)) {
12338 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12339 } else {
12340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12341 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12342 rc = BAD_VALUE;
12343 }
12344 }
12345 }
12346
Thierry Strudel269c81a2016-10-12 12:13:59 -070012347 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12348 float aec_speed;
12349 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12350 LOGD("AEC Speed :%f", aec_speed);
12351 if ( aec_speed < 0 ) {
12352 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12353 } else {
12354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12355 aec_speed)) {
12356 rc = BAD_VALUE;
12357 }
12358 }
12359 }
12360
12361 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12362 float awb_speed;
12363 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12364 LOGD("AWB Speed :%f", awb_speed);
12365 if ( awb_speed < 0 ) {
12366 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12367 } else {
12368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12369 awb_speed)) {
12370 rc = BAD_VALUE;
12371 }
12372 }
12373 }
12374
Thierry Strudel3d639192016-09-09 11:52:26 -070012375 // TNR
12376 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12377 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12378 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012379 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012380 cam_denoise_param_t tnr;
12381 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12382 tnr.process_plates =
12383 (cam_denoise_process_type_t)frame_settings.find(
12384 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12385 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012386
12387 if(b_TnrRequested != curr_tnr_state)
12388 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12389
Thierry Strudel3d639192016-09-09 11:52:26 -070012390 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
Thierry Strudel54dc9782017-02-15 12:12:10 -080012395 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012396 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012397 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012398 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12399 *exposure_metering_mode)) {
12400 rc = BAD_VALUE;
12401 }
12402 }
12403
Thierry Strudel3d639192016-09-09 11:52:26 -070012404 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12405 int32_t fwk_testPatternMode =
12406 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12407 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12408 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12409
12410 if (NAME_NOT_FOUND != testPatternMode) {
12411 cam_test_pattern_data_t testPatternData;
12412 memset(&testPatternData, 0, sizeof(testPatternData));
12413 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12414 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12415 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12416 int32_t *fwk_testPatternData =
12417 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12418 testPatternData.r = fwk_testPatternData[0];
12419 testPatternData.b = fwk_testPatternData[3];
12420 switch (gCamCapability[mCameraId]->color_arrangement) {
12421 case CAM_FILTER_ARRANGEMENT_RGGB:
12422 case CAM_FILTER_ARRANGEMENT_GRBG:
12423 testPatternData.gr = fwk_testPatternData[1];
12424 testPatternData.gb = fwk_testPatternData[2];
12425 break;
12426 case CAM_FILTER_ARRANGEMENT_GBRG:
12427 case CAM_FILTER_ARRANGEMENT_BGGR:
12428 testPatternData.gr = fwk_testPatternData[2];
12429 testPatternData.gb = fwk_testPatternData[1];
12430 break;
12431 default:
12432 LOGE("color arrangement %d is not supported",
12433 gCamCapability[mCameraId]->color_arrangement);
12434 break;
12435 }
12436 }
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12438 testPatternData)) {
12439 rc = BAD_VALUE;
12440 }
12441 } else {
12442 LOGE("Invalid framework sensor test pattern mode %d",
12443 fwk_testPatternMode);
12444 }
12445 }
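    // For SOLID_COLOR test patterns the framework supplies four per-channel
    // values: indices 0 and 3 always map to R and B, while indices 1 and 2 are
    // assigned to Gr/Gb according to the sensor's Bayer arrangement, as handled
    // in the switch above.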
12446
12447 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12448 size_t count = 0;
12449 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12450 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12451 gps_coords.data.d, gps_coords.count, count);
12452 if (gps_coords.count != count) {
12453 rc = BAD_VALUE;
12454 }
12455 }
12456
12457 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12458 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12459 size_t count = 0;
12460 const char *gps_methods_src = (const char *)
12461 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12462 memset(gps_methods, '\0', sizeof(gps_methods));
12463 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12464 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12465 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12466 if (GPS_PROCESSING_METHOD_SIZE != count) {
12467 rc = BAD_VALUE;
12468 }
12469 }
12470
12471 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12472 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12474 gps_timestamp)) {
12475 rc = BAD_VALUE;
12476 }
12477 }
12478
12479 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12480 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12481 cam_rotation_info_t rotation_info;
12482 if (orientation == 0) {
12483 rotation_info.rotation = ROTATE_0;
12484 } else if (orientation == 90) {
12485 rotation_info.rotation = ROTATE_90;
12486 } else if (orientation == 180) {
12487 rotation_info.rotation = ROTATE_180;
12488 } else if (orientation == 270) {
12489 rotation_info.rotation = ROTATE_270;
12490 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012491 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012492 rotation_info.streamId = snapshotStreamId;
12493 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12494 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12495 rc = BAD_VALUE;
12496 }
12497 }
12498
12499 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12500 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12501 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505
12506 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12507 uint32_t thumb_quality = (uint32_t)
12508 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12510 thumb_quality)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514
12515 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12516 cam_dimension_t dim;
12517 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12518 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12520 rc = BAD_VALUE;
12521 }
12522 }
12523
12524 // Internal metadata
12525 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12526 size_t count = 0;
12527 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12528 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12529 privatedata.data.i32, privatedata.count, count);
12530 if (privatedata.count != count) {
12531 rc = BAD_VALUE;
12532 }
12533 }
12534
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012535 // ISO/Exposure Priority
12536 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12537 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12538 cam_priority_mode_t mode =
12539 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12540 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12541 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12542 use_iso_exp_pty.previewOnly = FALSE;
12543 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12544 use_iso_exp_pty.value = *ptr;
12545
12546 if(CAM_ISO_PRIORITY == mode) {
12547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12548 use_iso_exp_pty)) {
12549 rc = BAD_VALUE;
12550 }
12551 }
12552 else {
12553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12554 use_iso_exp_pty)) {
12555 rc = BAD_VALUE;
12556 }
12557 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012558
12559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563 } else {
12564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12565 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012566 }
12567 }
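    // Requests carrying the ISO/exposure-time priority vendor tags also turn
    // CAM_INTF_PARM_ZSL_MODE on, and all other requests turn it back off;
    // presumably the manual-priority path is implemented on the ZSL pipeline.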
12568
12569 // Saturation
12570 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12571 int32_t* use_saturation =
12572 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577
Thierry Strudel3d639192016-09-09 11:52:26 -070012578 // EV step
12579 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12580 gCamCapability[mCameraId]->exp_compensation_step)) {
12581 rc = BAD_VALUE;
12582 }
12583
12584 // CDS info
12585 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12586 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12587 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12588
12589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12590 CAM_INTF_META_CDS_DATA, *cdsData)) {
12591 rc = BAD_VALUE;
12592 }
12593 }
12594
Shuzhen Wang19463d72016-03-08 11:09:52 -080012595 // Hybrid AE
12596 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12597 uint8_t *hybrid_ae = (uint8_t *)
12598 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12599
12600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12601 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605
Shuzhen Wang14415f52016-11-16 18:26:18 -080012606 // Histogram
12607 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12608 uint8_t histogramMode =
12609 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12610 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12611 histogramMode)) {
12612 rc = BAD_VALUE;
12613 }
12614 }
12615
12616 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12617 int32_t histogramBins =
12618 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12620 histogramBins)) {
12621 rc = BAD_VALUE;
12622 }
12623 }
12624
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012625 // Tracking AF
12626 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12627 uint8_t trackingAfTrigger =
12628 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12630 trackingAfTrigger)) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634
Thierry Strudel3d639192016-09-09 11:52:26 -070012635 return rc;
12636}
12637
12638/*===========================================================================
12639 * FUNCTION : captureResultCb
12640 *
12641 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12642 *
12643 * PARAMETERS :
12644 * @metadata : metadata super buffer from mm-camera-interface
12645 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12646 * @userdata: userdata
12647 *
12648 * RETURN : NONE
12649 *==========================================================================*/
12650void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12651 camera3_stream_buffer_t *buffer,
12652 uint32_t frame_number, bool isInputBuffer, void *userdata)
12653{
12654 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12655 if (hw == NULL) {
12656 LOGE("Invalid hw %p", hw);
12657 return;
12658 }
12659
12660 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12661 return;
12662}
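// The channel and mm-camera-interface layers take plain function-pointer
// callbacks, so the static wrappers here and below recover the
// QCamera3HardwareInterface instance from the opaque userdata pointer and
// forward to the corresponding member implementation.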
12663
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012664/*===========================================================================
12665 * FUNCTION : setBufferErrorStatus
12666 *
12667 * DESCRIPTION: Callback handler for channels to report any buffer errors
12668 *
12669 * PARAMETERS :
12670 * @ch : Channel on which buffer error is reported from
12671 * @frame_number : frame number on which buffer error is reported on
12672 * @buffer_status : buffer error status
12673 * @userdata: userdata
12674 *
12675 * RETURN : NONE
12676 *==========================================================================*/
12677void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12678 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12679{
12680 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12681 if (hw == NULL) {
12682 LOGE("Invalid hw %p", hw);
12683 return;
12684 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012685
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012686 hw->setBufferErrorStatus(ch, frame_number, err);
12687 return;
12688}
12689
12690void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12691 uint32_t frameNumber, camera3_buffer_status_t err)
12692{
12693 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12694 pthread_mutex_lock(&mMutex);
12695
12696 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12697 if (req.frame_number != frameNumber)
12698 continue;
12699 for (auto& k : req.mPendingBufferList) {
12700 if(k.stream->priv == ch) {
12701 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12702 }
12703 }
12704 }
12705
12706 pthread_mutex_unlock(&mMutex);
12707 return;
12708}
Thierry Strudel3d639192016-09-09 11:52:26 -070012709/*===========================================================================
12710 * FUNCTION : initialize
12711 *
12712 * DESCRIPTION: Pass framework callback pointers to HAL
12713 *
12714 * PARAMETERS :
12715 *
12716 *
12717 * RETURN : Success : 0
12718 * Failure: -ENODEV
12719 *==========================================================================*/
12720
12721int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12722 const camera3_callback_ops_t *callback_ops)
12723{
12724 LOGD("E");
12725 QCamera3HardwareInterface *hw =
12726 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12727 if (!hw) {
12728 LOGE("NULL camera device");
12729 return -ENODEV;
12730 }
12731
12732 int rc = hw->initialize(callback_ops);
12733 LOGD("X");
12734 return rc;
12735}
12736
12737/*===========================================================================
12738 * FUNCTION : configure_streams
12739 *
12740 * DESCRIPTION: Configure the set of output streams requested by the framework
12741 *
12742 * PARAMETERS :
12743 *
12744 *
12745 * RETURN : Success: 0
12746 * Failure: -EINVAL (if stream configuration is invalid)
12747 * -ENODEV (fatal error)
12748 *==========================================================================*/
12749
12750int QCamera3HardwareInterface::configure_streams(
12751 const struct camera3_device *device,
12752 camera3_stream_configuration_t *stream_list)
12753{
12754 LOGD("E");
12755 QCamera3HardwareInterface *hw =
12756 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12757 if (!hw) {
12758 LOGE("NULL camera device");
12759 return -ENODEV;
12760 }
12761 int rc = hw->configureStreams(stream_list);
12762 LOGD("X");
12763 return rc;
12764}
12765
12766/*===========================================================================
12767 * FUNCTION : construct_default_request_settings
12768 *
12769 * DESCRIPTION: Configure a settings buffer to meet the required use case
12770 *
12771 * PARAMETERS :
12772 *
12773 *
12774 * RETURN : Success: Return valid metadata
12775 * Failure: Return NULL
12776 *==========================================================================*/
12777const camera_metadata_t* QCamera3HardwareInterface::
12778 construct_default_request_settings(const struct camera3_device *device,
12779 int type)
12780{
12781
12782 LOGD("E");
12783 camera_metadata_t* fwk_metadata = NULL;
12784 QCamera3HardwareInterface *hw =
12785 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12786 if (!hw) {
12787 LOGE("NULL camera device");
12788 return NULL;
12789 }
12790
12791 fwk_metadata = hw->translateCapabilityToMetadata(type);
12792
12793 LOGD("X");
12794 return fwk_metadata;
12795}
12796
12797/*===========================================================================
12798 * FUNCTION : process_capture_request
12799 *
12800 * DESCRIPTION: Process a capture request submitted by the framework
12801 *
12802 * PARAMETERS :
12803 *
12804 *
12805 * RETURN :
12806 *==========================================================================*/
12807int QCamera3HardwareInterface::process_capture_request(
12808 const struct camera3_device *device,
12809 camera3_capture_request_t *request)
12810{
12811 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012812 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012813 QCamera3HardwareInterface *hw =
12814 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12815 if (!hw) {
12816 LOGE("NULL camera device");
12817 return -EINVAL;
12818 }
12819
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012820 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012821 LOGD("X");
12822 return rc;
12823}
12824
12825/*===========================================================================
12826 * FUNCTION : dump
12827 *
12828 * DESCRIPTION:
12829 *
12830 * PARAMETERS :
12831 *
12832 *
12833 * RETURN :
12834 *==========================================================================*/
12835
12836void QCamera3HardwareInterface::dump(
12837 const struct camera3_device *device, int fd)
12838{
12839 /* Log level property is read when "adb shell dumpsys media.camera" is
12840 called so that the log level can be controlled without restarting
12841 the media server */
12842 getLogLevel();
12843
12844 LOGD("E");
12845 QCamera3HardwareInterface *hw =
12846 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12847 if (!hw) {
12848 LOGE("NULL camera device");
12849 return;
12850 }
12851
12852 hw->dump(fd);
12853 LOGD("X");
12854 return;
12855}
12856
12857/*===========================================================================
12858 * FUNCTION : flush
12859 *
12860 * DESCRIPTION:
12861 *
12862 * PARAMETERS :
12863 *
12864 *
12865 * RETURN :
12866 *==========================================================================*/
12867
12868int QCamera3HardwareInterface::flush(
12869 const struct camera3_device *device)
12870{
12871 int rc;
12872 LOGD("E");
12873 QCamera3HardwareInterface *hw =
12874 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12875 if (!hw) {
12876 LOGE("NULL camera device");
12877 return -EINVAL;
12878 }
12879
12880 pthread_mutex_lock(&hw->mMutex);
12881 // Validate current state
12882 switch (hw->mState) {
12883 case STARTED:
12884 /* valid state */
12885 break;
12886
12887 case ERROR:
12888 pthread_mutex_unlock(&hw->mMutex);
12889 hw->handleCameraDeviceError();
12890 return -ENODEV;
12891
12892 default:
12893 LOGI("Flush returned during state %d", hw->mState);
12894 pthread_mutex_unlock(&hw->mMutex);
12895 return 0;
12896 }
12897 pthread_mutex_unlock(&hw->mMutex);
12898
12899 rc = hw->flush(true /* restart channels */ );
12900 LOGD("X");
12901 return rc;
12902}
12903
12904/*===========================================================================
12905 * FUNCTION : close_camera_device
12906 *
12907 * DESCRIPTION:
12908 *
12909 * PARAMETERS :
12910 *
12911 *
12912 * RETURN :
12913 *==========================================================================*/
12914int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12915{
12916 int ret = NO_ERROR;
12917 QCamera3HardwareInterface *hw =
12918 reinterpret_cast<QCamera3HardwareInterface *>(
12919 reinterpret_cast<camera3_device_t *>(device)->priv);
12920 if (!hw) {
12921 LOGE("NULL camera device");
12922 return BAD_VALUE;
12923 }
12924
12925 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12926 delete hw;
12927 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012928 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012929 return ret;
12930}
12931
12932/*===========================================================================
12933 * FUNCTION : getWaveletDenoiseProcessPlate
12934 *
12935 * DESCRIPTION: query wavelet denoise process plate
12936 *
12937 * PARAMETERS : None
12938 *
12939 * RETURN : WNR process plate value
12940 *==========================================================================*/
12941cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12942{
12943 char prop[PROPERTY_VALUE_MAX];
12944 memset(prop, 0, sizeof(prop));
12945 property_get("persist.denoise.process.plates", prop, "0");
12946 int processPlate = atoi(prop);
12947 switch(processPlate) {
12948 case 0:
12949 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12950 case 1:
12951 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12952 case 2:
12953 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12954 case 3:
12955 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12956 default:
12957 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12958 }
12959}
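// Usage note: the plate selection can be changed at runtime through the property
// read above, e.g. "adb shell setprop persist.denoise.process.plates 2" selects
// CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; unrecognized values fall back to the same
// streamlined YCbCr default.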
12960
12961
12962/*===========================================================================
12963 * FUNCTION : getTemporalDenoiseProcessPlate
12964 *
12965 * DESCRIPTION: query temporal denoise process plate
12966 *
12967 * PARAMETERS : None
12968 *
12969 * RETURN : TNR process plate value
12970 *==========================================================================*/
12971cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12972{
12973 char prop[PROPERTY_VALUE_MAX];
12974 memset(prop, 0, sizeof(prop));
12975 property_get("persist.tnr.process.plates", prop, "0");
12976 int processPlate = atoi(prop);
12977 switch(processPlate) {
12978 case 0:
12979 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12980 case 1:
12981 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12982 case 2:
12983 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12984 case 3:
12985 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12986 default:
12987 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12988 }
12989}
12990
12991
12992/*===========================================================================
12993 * FUNCTION : extractSceneMode
12994 *
12995 * DESCRIPTION: Extract scene mode from framework-set metadata
12996 *
12997 * PARAMETERS :
12998 * @frame_settings: CameraMetadata reference
12999 * @metaMode: ANDROID_CONTROL_MODE
13000 * @hal_metadata: hal metadata structure
13001 *
13002 * RETURN : int32_t type of status (NO_ERROR on success)
13003 *==========================================================================*/
13004int32_t QCamera3HardwareInterface::extractSceneMode(
13005 const CameraMetadata &frame_settings, uint8_t metaMode,
13006 metadata_buffer_t *hal_metadata)
13007{
13008 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013009 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13010
13011 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13012 LOGD("Ignoring control mode OFF_KEEP_STATE");
13013 return NO_ERROR;
13014 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013015
13016 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13017 camera_metadata_ro_entry entry =
13018 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13019 if (0 == entry.count)
13020 return rc;
13021
13022 uint8_t fwk_sceneMode = entry.data.u8[0];
13023
13024 int val = lookupHalName(SCENE_MODES_MAP,
13025 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13026 fwk_sceneMode);
13027 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013028 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013029 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013030 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013031 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013032
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013033 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13034 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13035 }
13036
13037 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13038 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013039 cam_hdr_param_t hdr_params;
13040 hdr_params.hdr_enable = 1;
13041 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13042 hdr_params.hdr_need_1x = false;
13043 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13044 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13045 rc = BAD_VALUE;
13046 }
13047 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013048
Thierry Strudel3d639192016-09-09 11:52:26 -070013049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13050 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13051 rc = BAD_VALUE;
13052 }
13053 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013054
13055 if (mForceHdrSnapshot) {
13056 cam_hdr_param_t hdr_params;
13057 hdr_params.hdr_enable = 1;
13058 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13059 hdr_params.hdr_need_1x = false;
13060 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13061 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13062 rc = BAD_VALUE;
13063 }
13064 }
13065
Thierry Strudel3d639192016-09-09 11:52:26 -070013066 return rc;
13067}
13068
13069/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013070 * FUNCTION : setVideoHdrMode
13071 *
13072 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13073 *
13074 * PARAMETERS :
13075 * @hal_metadata: hal metadata structure
13076 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13077 *
13078 * RETURN : int32_t type of status (NO_ERROR on success)
13079 *==========================================================================*/
13080int32_t QCamera3HardwareInterface::setVideoHdrMode(
13081 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13082{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013083 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13084 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13085 }
13086
13087 LOGE("Invalid Video HDR mode %d!", vhdr);
13088 return BAD_VALUE;
13089}
13090
13091/*===========================================================================
13092 * FUNCTION : setSensorHDR
13093 *
13094 * DESCRIPTION: Enable/disable sensor HDR.
13095 *
13096 * PARAMETERS :
13097 * @hal_metadata: hal metadata structure
13098 * @enable: boolean whether to enable/disable sensor HDR
13099 *
13100 * RETURN : int32_t type of status (NO_ERROR on success)
13101 *==========================================================================*/
13102int32_t QCamera3HardwareInterface::setSensorHDR(
13103 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13104{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013105 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013106 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13107
13108 if (enable) {
13109 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13110 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13111 #ifdef _LE_CAMERA_
13112 //Default to staggered HDR for IOT
13113 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13114 #else
13115 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13116 #endif
13117 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13118 }
13119
13120 bool isSupported = false;
13121 switch (sensor_hdr) {
13122 case CAM_SENSOR_HDR_IN_SENSOR:
13123 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13124 CAM_QCOM_FEATURE_SENSOR_HDR) {
13125 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013126 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013127 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013128 break;
13129 case CAM_SENSOR_HDR_ZIGZAG:
13130 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13131 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13132 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013133 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013134 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013135 break;
13136 case CAM_SENSOR_HDR_STAGGERED:
13137 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13138 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13139 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013140 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013141 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013142 break;
13143 case CAM_SENSOR_HDR_OFF:
13144 isSupported = true;
13145 LOGD("Turning off sensor HDR");
13146 break;
13147 default:
13148 LOGE("HDR mode %d not supported", sensor_hdr);
13149 rc = BAD_VALUE;
13150 break;
13151 }
13152
13153 if(isSupported) {
13154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13155 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13156 rc = BAD_VALUE;
13157 } else {
13158 if(!isVideoHdrEnable)
13159 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013160 }
13161 }
13162 return rc;
13163}
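// Note: persist.camera.sensor.hdr is cast directly to cam_sensor_hdr_type_t, and
// a requested mode is programmed only when the matching bit is advertised in
// qcom_supported_feature_mask (CAM_SENSOR_HDR_OFF is always accepted); per the
// _LE_CAMERA_ default of "3" above, that value corresponds to staggered HDR.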
13164
13165/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013166 * FUNCTION : needRotationReprocess
13167 *
13168 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13169 *
13170 * PARAMETERS : none
13171 *
13172 * RETURN : true: needed
13173 * false: no need
13174 *==========================================================================*/
13175bool QCamera3HardwareInterface::needRotationReprocess()
13176{
13177 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13178 // pp has the capability to process rotation, so any requested rotation is done in reprocess
13179 LOGH("need do reprocess for rotation");
13180 return true;
13181 }
13182
13183 return false;
13184}
13185
13186/*===========================================================================
13187 * FUNCTION : needReprocess
13188 *
13189 * DESCRIPTION: if reprocess is needed
13190 *
13191 * PARAMETERS : none
13192 *
13193 * RETURN : true: needed
13194 * false: no need
13195 *==========================================================================*/
13196bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13197{
13198 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13199 // TODO: add for ZSL HDR later
13200 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13201 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13202 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13203 return true;
13204 } else {
13205 LOGH("already post processed frame");
13206 return false;
13207 }
13208 }
13209 return needRotationReprocess();
13210}
13211
13212/*===========================================================================
13213 * FUNCTION : needJpegExifRotation
13214 *
13215 * DESCRIPTION: if JPEG EXIF rotation is needed
13216 *
13217 * PARAMETERS : none
13218 *
13219 * RETURN : true: needed
13220 * false: no need
13221 *==========================================================================*/
13222bool QCamera3HardwareInterface::needJpegExifRotation()
13223{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013224 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013225 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13226 LOGD("Need use Jpeg EXIF Rotation");
13227 return true;
13228 }
13229 return false;
13230}
13231
13232/*===========================================================================
13233 * FUNCTION : addOfflineReprocChannel
13234 *
13235 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13236 * coming from input channel
13237 *
13238 * PARAMETERS :
13239 * @config : reprocess configuration
13240 * @inputChHandle : pointer to the input (source) channel
13241 *
13242 *
13243 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13244 *==========================================================================*/
13245QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13246 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13247{
13248 int32_t rc = NO_ERROR;
13249 QCamera3ReprocessChannel *pChannel = NULL;
13250
13251 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
13252 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13253 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
13254 if (NULL == pChannel) {
13255 LOGE("no mem for reprocess channel");
13256 return NULL;
13257 }
13258
13259 rc = pChannel->initialize(IS_TYPE_NONE);
13260 if (rc != NO_ERROR) {
13261 LOGE("init reprocess channel failed, ret = %d", rc);
13262 delete pChannel;
13263 return NULL;
13264 }
13265
13266 // pp feature config
13267 cam_pp_feature_config_t pp_config;
13268 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13269
13270 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13271 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13272 & CAM_QCOM_FEATURE_DSDN) {
13273 // Use CPP CDS in case h/w supports it.
13274 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13275 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13276 }
13277 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13278 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13279 }
13280
13281 if (config.hdr_param.hdr_enable) {
13282 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13283 pp_config.hdr_param = config.hdr_param;
13284 }
13285
13286 if (mForceHdrSnapshot) {
13287 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13288 pp_config.hdr_param.hdr_enable = 1;
13289 pp_config.hdr_param.hdr_need_1x = 0;
13290 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13291 }
13292
13293 rc = pChannel->addReprocStreamsFromSource(pp_config,
13294 config,
13295 IS_TYPE_NONE,
13296 mMetadataChannel);
13297
13298 if (rc != NO_ERROR) {
13299 delete pChannel;
13300 return NULL;
13301 }
13302 return pChannel;
13303}
13304
13305/*===========================================================================
13306 * FUNCTION : getMobicatMask
13307 *
13308 * DESCRIPTION: returns mobicat mask
13309 *
13310 * PARAMETERS : none
13311 *
13312 * RETURN : mobicat mask
13313 *
13314 *==========================================================================*/
13315uint8_t QCamera3HardwareInterface::getMobicatMask()
13316{
13317 return m_MobicatMask;
13318}
13319
13320/*===========================================================================
13321 * FUNCTION : setMobicat
13322 *
13323 * DESCRIPTION: set Mobicat on/off.
13324 *
13325 * PARAMETERS :
13326 * @params : none
13327 *
13328 * RETURN : int32_t type of status
13329 * NO_ERROR -- success
13330 * non-zero failure code
13331 *==========================================================================*/
13332int32_t QCamera3HardwareInterface::setMobicat()
13333{
13334 char value [PROPERTY_VALUE_MAX];
13335 property_get("persist.camera.mobicat", value, "0");
13336 int32_t ret = NO_ERROR;
13337 uint8_t enableMobi = (uint8_t)atoi(value);
13338
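// When the persist.camera.mobicat property is non-zero, request a chromatix reload on all
// modules for both the ISP (VFE) and the postprocessing pipeline, and record the enable value
// in m_MobicatMask.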
13339 if (enableMobi) {
13340 tune_cmd_t tune_cmd;
13341 tune_cmd.type = SET_RELOAD_CHROMATIX;
13342 tune_cmd.module = MODULE_ALL;
13343 tune_cmd.value = TRUE;
13344 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13345 CAM_INTF_PARM_SET_VFE_COMMAND,
13346 tune_cmd);
13347
13348 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13349 CAM_INTF_PARM_SET_PP_COMMAND,
13350 tune_cmd);
13351 }
13352 m_MobicatMask = enableMobi;
13353
13354 return ret;
13355}
13356
13357/*===========================================================================
13358* FUNCTION : getLogLevel
13359*
13360* DESCRIPTION: Reads the log level property into a variable
13361*
13362* PARAMETERS :
13363* None
13364*
13365* RETURN :
13366* None
13367*==========================================================================*/
13368void QCamera3HardwareInterface::getLogLevel()
13369{
13370 char prop[PROPERTY_VALUE_MAX];
13371 uint32_t globalLogLevel = 0;
13372
13373 property_get("persist.camera.hal.debug", prop, "0");
13374 int val = atoi(prop);
13375 if (0 <= val) {
13376 gCamHal3LogLevel = (uint32_t)val;
13377 }
13378
13379 property_get("persist.camera.kpi.debug", prop, "0");
13380 gKpiDebugLevel = atoi(prop);
13381
13382 property_get("persist.camera.global.debug", prop, "0");
13383 val = atoi(prop);
13384 if (0 <= val) {
13385 globalLogLevel = (uint32_t)val;
13386 }
13387
13388 /* Highest log level among hal.logs and global.logs is selected */
13389 if (gCamHal3LogLevel < globalLogLevel)
13390 gCamHal3LogLevel = globalLogLevel;
13391
13392 return;
13393}
13394
13395/*===========================================================================
13396 * FUNCTION : validateStreamRotations
13397 *
13398 * DESCRIPTION: Check if the rotations requested are supported
13399 *
13400 * PARAMETERS :
13401 * @stream_list : streams to be configured
13402 *
13403 * RETURN : NO_ERROR on success
13404 * -EINVAL on failure
13405 *
13406 *==========================================================================*/
13407int QCamera3HardwareInterface::validateStreamRotations(
13408 camera3_stream_configuration_t *streamList)
13409{
13410 int rc = NO_ERROR;
13411
13412 /*
13413 * Loop through all streams requested in configuration
13414 * Check if unsupported rotations have been requested on any of them
13415 */
13416 for (size_t j = 0; j < streamList->num_streams; j++){
13417 camera3_stream_t *newStream = streamList->streams[j];
13418
13419 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13420 bool isImplDef = (newStream->format ==
13421 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13422 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13423 isImplDef);
13424
13425 if (isRotated && (!isImplDef || isZsl)) {
13426 LOGE("Error: Unsupported rotation of %d requested for stream"
13427 " type:%d and stream format:%d",
13428 newStream->rotation, newStream->stream_type,
13429 newStream->format);
13430 rc = -EINVAL;
13431 break;
13432 }
13433 }
13434
13435 return rc;
13436}
13437
13438/*===========================================================================
13439* FUNCTION : getFlashInfo
13440*
13441* DESCRIPTION: Retrieve information about whether the device has a flash.
13442*
13443* PARAMETERS :
13444* @cameraId : Camera id to query
13445* @hasFlash : Boolean indicating whether there is a flash device
13446* associated with given camera
13447* @flashNode : If a flash device exists, this will be its device node.
13448*
13449* RETURN :
13450* None
13451*==========================================================================*/
13452void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13453 bool& hasFlash,
13454 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13455{
13456 cam_capability_t* camCapability = gCamCapability[cameraId];
13457 if (NULL == camCapability) {
13458 hasFlash = false;
13459 flashNode[0] = '\0';
13460 } else {
13461 hasFlash = camCapability->flash_available;
13462 strlcpy(flashNode,
13463 (char*)camCapability->flash_dev_name,
13464 QCAMERA_MAX_FILEPATH_LENGTH);
13465 }
13466}
13467
13468/*===========================================================================
13469* FUNCTION : getEepromVersionInfo
13470*
13471* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13472*
13473* PARAMETERS : None
13474*
13475* RETURN : string describing EEPROM version
13476* "\0" if no such info available
13477*==========================================================================*/
13478const char *QCamera3HardwareInterface::getEepromVersionInfo()
13479{
13480 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13481}
13482
13483/*===========================================================================
13484* FUNCTION : getLdafCalib
13485*
13486* DESCRIPTION: Retrieve Laser AF calibration data
13487*
13488* PARAMETERS : None
13489*
13490* RETURN : Two uint32_t describing laser AF calibration data
13491* NULL if none is available.
13492*==========================================================================*/
13493const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13494{
13495 if (mLdafCalibExist) {
13496 return &mLdafCalib[0];
13497 } else {
13498 return NULL;
13499 }
13500}
13501
13502/*===========================================================================
13503 * FUNCTION : dynamicUpdateMetaStreamInfo
13504 *
13505 * DESCRIPTION: This function:
13506 * (1) stops all the channels
13507 * (2) returns error on pending requests and buffers
13508 * (3) sends metastream_info in setparams
13509 * (4) starts all channels
13510 * This is useful when the sensor has to be restarted to apply any
13511 * settings, such as the frame rate, from a different sensor mode
13512 *
13513 * PARAMETERS : None
13514 *
13515 * RETURN : NO_ERROR on success
13516 * Error codes on failure
13517 *
13518 *==========================================================================*/
13519int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13520{
13521 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
13522 int rc = NO_ERROR;
13523
13524 LOGD("E");
13525
13526 rc = stopAllChannels();
13527 if (rc < 0) {
13528 LOGE("stopAllChannels failed");
13529 return rc;
13530 }
13531
13532 rc = notifyErrorForPendingRequests();
13533 if (rc < 0) {
13534 LOGE("notifyErrorForPendingRequests failed");
13535 return rc;
13536 }
13537
13538 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13539 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx"
13540 " Format:%d",
13541 mStreamConfigInfo.type[i],
13542 mStreamConfigInfo.stream_sizes[i].width,
13543 mStreamConfigInfo.stream_sizes[i].height,
13544 mStreamConfigInfo.postprocess_mask[i],
13545 mStreamConfigInfo.format[i]);
13546 }
13547
13548 /* Send meta stream info once again so that ISP can start */
13549 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13550 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13551 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13552 mParameters);
13553 if (rc < 0) {
13554 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13555 }
13556
13557 rc = startAllChannels();
13558 if (rc < 0) {
13559 LOGE("startAllChannels failed");
13560 return rc;
13561 }
13562
13563 LOGD("X");
13564 return rc;
13565}
13566
13567/*===========================================================================
13568 * FUNCTION : stopAllChannels
13569 *
13570 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13571 *
13572 * PARAMETERS : None
13573 *
13574 * RETURN : NO_ERROR on success
13575 * Error codes on failure
13576 *
13577 *==========================================================================*/
13578int32_t QCamera3HardwareInterface::stopAllChannels()
13579{
13580 int32_t rc = NO_ERROR;
13581
13582 LOGD("Stopping all channels");
13583 // Stop the Streams/Channels
13584 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13585 it != mStreamInfo.end(); it++) {
13586 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13587 if (channel) {
13588 channel->stop();
13589 }
13590 (*it)->status = INVALID;
13591 }
13592
13593 if (mSupportChannel) {
13594 mSupportChannel->stop();
13595 }
13596 if (mAnalysisChannel) {
13597 mAnalysisChannel->stop();
13598 }
13599 if (mRawDumpChannel) {
13600 mRawDumpChannel->stop();
13601 }
13602 if (mHdrPlusRawSrcChannel) {
13603 mHdrPlusRawSrcChannel->stop();
13604 }
13605 if (mMetadataChannel) {
13606 /* If mStreamInfo is not empty, there is a metadata stream */
13607 mMetadataChannel->stop();
13608 }
13609
13610 LOGD("All channels stopped");
13611 return rc;
13612}
13613
13614/*===========================================================================
13615 * FUNCTION : startAllChannels
13616 *
13617 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13618 *
13619 * PARAMETERS : None
13620 *
13621 * RETURN : NO_ERROR on success
13622 * Error codes on failure
13623 *
13624 *==========================================================================*/
13625int32_t QCamera3HardwareInterface::startAllChannels()
13626{
13627 int32_t rc = NO_ERROR;
13628
13629 LOGD("Start all channels ");
13630 // Start the Streams/Channels
13631 if (mMetadataChannel) {
13632 /* If mStreamInfo is not empty, there is a metadata stream */
13633 rc = mMetadataChannel->start();
13634 if (rc < 0) {
13635 LOGE("META channel start failed");
13636 return rc;
13637 }
13638 }
13639 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13640 it != mStreamInfo.end(); it++) {
13641 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13642 if (channel) {
13643 rc = channel->start();
13644 if (rc < 0) {
13645 LOGE("channel start failed");
13646 return rc;
13647 }
13648 }
13649 }
13650 if (mAnalysisChannel) {
13651 mAnalysisChannel->start();
13652 }
13653 if (mSupportChannel) {
13654 rc = mSupportChannel->start();
13655 if (rc < 0) {
13656 LOGE("Support channel start failed");
13657 return rc;
13658 }
13659 }
13660 if (mRawDumpChannel) {
13661 rc = mRawDumpChannel->start();
13662 if (rc < 0) {
13663 LOGE("RAW dump channel start failed");
13664 return rc;
13665 }
13666 }
13667 if (mHdrPlusRawSrcChannel) {
13668 rc = mHdrPlusRawSrcChannel->start();
13669 if (rc < 0) {
13670 LOGE("HDR+ RAW channel start failed");
13671 return rc;
13672 }
13673 }
13674
13675 LOGD("All channels started");
13676 return rc;
13677}
13678
13679/*===========================================================================
13680 * FUNCTION : notifyErrorForPendingRequests
13681 *
13682 * DESCRIPTION: This function sends error for all the pending requests/buffers
13683 *
13684 * PARAMETERS : None
13685 *
13686 * RETURN : Error codes
13687 * NO_ERROR on success
13688 *
13689 *==========================================================================*/
13690int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13691{
13692 int32_t rc = NO_ERROR;
13693 unsigned int frameNum = 0;
13694 camera3_capture_result_t result;
13695 camera3_stream_buffer_t *pStream_Buf = NULL;
13696
13697 memset(&result, 0, sizeof(camera3_capture_result_t));
13698
13699 if (mPendingRequestsList.size() > 0) {
13700 pendingRequestIterator i = mPendingRequestsList.begin();
13701 frameNum = i->frame_number;
13702 } else {
13703 /* There might still be pending buffers even though there are
13704 no pending requests. Setting the frameNum to MAX so that
13705 all the buffers with smaller frame numbers are returned */
13706 frameNum = UINT_MAX;
13707 }
13708
13709 LOGH("Oldest frame num on mPendingRequestsList = %u",
13710 frameNum);
13711
13712 notifyErrorFoPendingDepthData(mDepthChannel);
13713
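// Two cases below: requests older than the oldest entry still on mPendingRequestsList (their
// metadata has already been sent) get a CAMERA3_MSG_ERROR_BUFFER per pending buffer, while the
// remaining requests get a CAMERA3_MSG_ERROR_REQUEST and have all of their buffers returned in
// error state.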
13714 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13715 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13716
13717 if (req->frame_number < frameNum) {
13718 // Send Error notify to frameworks for each buffer for which
13719 // metadata buffer is already sent
13720 LOGH("Sending ERROR BUFFER for frame %d for %zu buffer(s)",
13721 req->frame_number, req->mPendingBufferList.size());
13722
13723 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13724 if (NULL == pStream_Buf) {
13725 LOGE("No memory for pending buffers array");
13726 return NO_MEMORY;
13727 }
13728 memset(pStream_Buf, 0,
13729 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13730 result.result = NULL;
13731 result.frame_number = req->frame_number;
13732 result.num_output_buffers = req->mPendingBufferList.size();
13733 result.output_buffers = pStream_Buf;
13734
13735 size_t index = 0;
13736 for (auto info = req->mPendingBufferList.begin();
13737 info != req->mPendingBufferList.end(); ) {
13738
13739 camera3_notify_msg_t notify_msg;
13740 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13741 notify_msg.type = CAMERA3_MSG_ERROR;
13742 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13743 notify_msg.message.error.error_stream = info->stream;
13744 notify_msg.message.error.frame_number = req->frame_number;
13745 pStream_Buf[index].acquire_fence = -1;
13746 pStream_Buf[index].release_fence = -1;
13747 pStream_Buf[index].buffer = info->buffer;
13748 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13749 pStream_Buf[index].stream = info->stream;
13750 orchestrateNotify(&notify_msg);
13751 index++;
13752 // Remove buffer from list
13753 info = req->mPendingBufferList.erase(info);
13754 }
13755
13756 // Remove this request from Map
13757 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13758 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13759 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13760
13761 orchestrateResult(&result);
13762
13763 delete [] pStream_Buf;
13764 } else {
13765
13766 // Go through the pending requests info and send error request to framework
13767 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13768
13769 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13770
13771 // Send error notify to frameworks
13772 camera3_notify_msg_t notify_msg;
13773 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13774 notify_msg.type = CAMERA3_MSG_ERROR;
13775 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13776 notify_msg.message.error.error_stream = NULL;
13777 notify_msg.message.error.frame_number = req->frame_number;
13778 orchestrateNotify(&notify_msg);
13779
13780 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13781 if (NULL == pStream_Buf) {
13782 LOGE("No memory for pending buffers array");
13783 return NO_MEMORY;
13784 }
13785 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13786
13787 result.result = NULL;
13788 result.frame_number = req->frame_number;
13789 result.input_buffer = i->input_buffer;
13790 result.num_output_buffers = req->mPendingBufferList.size();
13791 result.output_buffers = pStream_Buf;
13792
13793 size_t index = 0;
13794 for (auto info = req->mPendingBufferList.begin();
13795 info != req->mPendingBufferList.end(); ) {
13796 pStream_Buf[index].acquire_fence = -1;
13797 pStream_Buf[index].release_fence = -1;
13798 pStream_Buf[index].buffer = info->buffer;
13799 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13800 pStream_Buf[index].stream = info->stream;
13801 index++;
13802 // Remove buffer from list
13803 info = req->mPendingBufferList.erase(info);
13804 }
13805
13806 // Remove this request from Map
13807 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %zu",
13808 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13809 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13810
13811 orchestrateResult(&result);
13812 delete [] pStream_Buf;
13813 i = erasePendingRequest(i);
13814 }
13815 }
13816
13817 /* Reset pending frame Drop list and requests list */
13818 mPendingFrameDropList.clear();
13819
13820 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13821 req.mPendingBufferList.clear();
13822 }
13823 mPendingBuffersMap.mPendingBuffersInRequest.clear();
13824 LOGH("Cleared all the pending buffers ");
13825
13826 return rc;
13827}
13828
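// Note: returns true when the requested output exceeds the max viewfinder size or 4K in either
// dimension, i.e. the stream is presumed to be served by the encoder (full-size) path.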
13829bool QCamera3HardwareInterface::isOnEncoder(
13830 const cam_dimension_t max_viewfinder_size,
13831 uint32_t width, uint32_t height)
13832{
13833 return ((width > (uint32_t)max_viewfinder_size.width) ||
13834 (height > (uint32_t)max_viewfinder_size.height) ||
13835 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13836 (height > (uint32_t)VIDEO_4K_HEIGHT));
13837}
13838
13839/*===========================================================================
13840 * FUNCTION : setBundleInfo
13841 *
13842 * DESCRIPTION: Set bundle info for all streams that are bundled.
13843 *
13844 * PARAMETERS : None
13845 *
13846 * RETURN : NO_ERROR on success
13847 * Error codes on failure
13848 *==========================================================================*/
13849int32_t QCamera3HardwareInterface::setBundleInfo()
13850{
13851 int32_t rc = NO_ERROR;
13852
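// Query the bundle info for this channel handle once and hand it to every active channel
// (analysis, support, app streams, RAW dump, HDR+ RAW source) so they all share the same bundle.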
13853 if (mChannelHandle) {
13854 cam_bundle_config_t bundleInfo;
13855 memset(&bundleInfo, 0, sizeof(bundleInfo));
13856 rc = mCameraHandle->ops->get_bundle_info(
13857 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13858 if (rc != NO_ERROR) {
13859 LOGE("get_bundle_info failed");
13860 return rc;
13861 }
13862 if (mAnalysisChannel) {
13863 mAnalysisChannel->setBundleInfo(bundleInfo);
13864 }
13865 if (mSupportChannel) {
13866 mSupportChannel->setBundleInfo(bundleInfo);
13867 }
13868 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13869 it != mStreamInfo.end(); it++) {
13870 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13871 channel->setBundleInfo(bundleInfo);
13872 }
13873 if (mRawDumpChannel) {
13874 mRawDumpChannel->setBundleInfo(bundleInfo);
13875 }
13876 if (mHdrPlusRawSrcChannel) {
13877 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13878 }
13879 }
13880
13881 return rc;
13882}
13883
13884/*===========================================================================
13885 * FUNCTION : setInstantAEC
13886 *
13887 * DESCRIPTION: Set Instant AEC related params.
13888 *
13889 * PARAMETERS :
13890 * @meta: CameraMetadata reference
13891 *
13892 * RETURN : NO_ERROR on success
13893 * Error codes on failure
13894 *==========================================================================*/
13895int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13896{
13897 int32_t rc = NO_ERROR;
13898 uint8_t val = 0;
13899 char prop[PROPERTY_VALUE_MAX];
13900
13901 // First try to configure instant AEC from framework metadata
13902 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13903 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13904 }
13905
13906 // If framework did not set this value, try to read from set prop.
13907 if (val == 0) {
13908 memset(prop, 0, sizeof(prop));
13909 property_get("persist.camera.instant.aec", prop, "0");
13910 val = (uint8_t)atoi(prop);
13911 }
13912
13913 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13914 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13916 mInstantAEC = val;
13917 mInstantAECSettledFrameNumber = 0;
13918 mInstantAecFrameIdxCount = 0;
13919 LOGH("instantAEC value set %d",val);
13920 if (mInstantAEC) {
13921 memset(prop, 0, sizeof(prop));
13922 property_get("persist.camera.ae.instant.bound", prop, "10");
13923 int32_t aec_frame_skip_cnt = atoi(prop);
13924 if (aec_frame_skip_cnt >= 0) {
13925 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13926 } else {
13927 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13928 rc = BAD_VALUE;
13929 }
13930 }
13931 } else {
13932 LOGE("Bad instant aec value set %d", val);
13933 rc = BAD_VALUE;
13934 }
13935 return rc;
13936}
13937
13938/*===========================================================================
13939 * FUNCTION : get_num_overall_buffers
13940 *
13941 * DESCRIPTION: Estimate number of pending buffers across all requests.
13942 *
13943 * PARAMETERS : None
13944 *
13945 * RETURN : Number of overall pending buffers
13946 *
13947 *==========================================================================*/
13948uint32_t PendingBuffersMap::get_num_overall_buffers()
13949{
13950 uint32_t sum_buffers = 0;
13951 for (auto &req : mPendingBuffersInRequest) {
13952 sum_buffers += req.mPendingBufferList.size();
13953 }
13954 return sum_buffers;
13955}
13956
13957/*===========================================================================
13958 * FUNCTION : removeBuf
13959 *
13960 * DESCRIPTION: Remove a matching buffer from tracker.
13961 *
13962 * PARAMETERS : @buffer: image buffer for the callback
13963 *
13964 * RETURN : None
13965 *
13966 *==========================================================================*/
13967void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13968{
13969 bool buffer_found = false;
13970 for (auto req = mPendingBuffersInRequest.begin();
13971 req != mPendingBuffersInRequest.end(); req++) {
13972 for (auto k = req->mPendingBufferList.begin();
13973 k != req->mPendingBufferList.end(); k++ ) {
13974 if (k->buffer == buffer) {
13975 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13976 req->frame_number, buffer);
13977 k = req->mPendingBufferList.erase(k);
13978 if (req->mPendingBufferList.empty()) {
13979 // Remove this request from Map
13980 req = mPendingBuffersInRequest.erase(req);
13981 }
13982 buffer_found = true;
13983 break;
13984 }
13985 }
13986 if (buffer_found) {
13987 break;
13988 }
13989 }
13990 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13991 get_num_overall_buffers());
13992}
13993
13994/*===========================================================================
13995 * FUNCTION : getBufErrStatus
13996 *
13997 * DESCRIPTION: get buffer error status
13998 *
13999 * PARAMETERS : @buffer: buffer handle
14000 *
14001 * RETURN : Error status
14002 *
14003 *==========================================================================*/
14004int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14005{
14006 for (auto& req : mPendingBuffersInRequest) {
14007 for (auto& k : req.mPendingBufferList) {
14008 if (k.buffer == buffer)
14009 return k.bufStatus;
14010 }
14011 }
14012 return CAMERA3_BUFFER_STATUS_OK;
14013}
14014
14015/*===========================================================================
14016 * FUNCTION : setPAAFSupport
14017 *
14018 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14019 * feature mask according to stream type and filter
14020 * arrangement
14021 *
14022 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14023 * @stream_type: stream type
14024 * @filter_arrangement: filter arrangement
14025 *
14026 * RETURN : None
14027 *==========================================================================*/
14028void QCamera3HardwareInterface::setPAAFSupport(
14029 cam_feature_mask_t& feature_mask,
14030 cam_stream_type_t stream_type,
14031 cam_color_filter_arrangement_t filter_arrangement)
14032{
14033 switch (filter_arrangement) {
14034 case CAM_FILTER_ARRANGEMENT_RGGB:
14035 case CAM_FILTER_ARRANGEMENT_GRBG:
14036 case CAM_FILTER_ARRANGEMENT_GBRG:
14037 case CAM_FILTER_ARRANGEMENT_BGGR:
14038 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14039 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
14040 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
14041 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14042 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14043 }
14044 break;
14045 case CAM_FILTER_ARRANGEMENT_Y:
14046 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14047 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14048 }
14049 break;
14050 default:
14051 break;
14052 }
14053 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14054 feature_mask, stream_type, filter_arrangement);
14055
14056
14057}
14058
14059/*===========================================================================
14060* FUNCTION : getSensorMountAngle
14061*
14062* DESCRIPTION: Retrieve sensor mount angle
14063*
14064* PARAMETERS : None
14065*
14066* RETURN : sensor mount angle in uint32_t
14067*==========================================================================*/
14068uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14069{
14070 return gCamCapability[mCameraId]->sensor_mount_angle;
14071}
14072
14073/*===========================================================================
14074* FUNCTION : getRelatedCalibrationData
14075*
14076* DESCRIPTION: Retrieve related system calibration data
14077*
14078* PARAMETERS : None
14079*
14080* RETURN : Pointer of related system calibration data
14081*==========================================================================*/
14082const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14083{
14084 return (const cam_related_system_calibration_data_t *)
14085 &(gCamCapability[mCameraId]->related_cam_calibration);
14086}
14087
14088/*===========================================================================
14089 * FUNCTION : is60HzZone
14090 *
14091 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14092 *
14093 * PARAMETERS : None
14094 *
14095 * RETURN : True if in 60Hz zone, False otherwise
14096 *==========================================================================*/
14097bool QCamera3HardwareInterface::is60HzZone()
14098{
14099 time_t t = time(NULL);
14100 struct tm lt;
14101
14102 struct tm* r = localtime_r(&t, &lt);
14103
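// Heuristic based on the local UTC offset: offsets at or below -2h (the Americas) and at or
// above +8h (East Asia, Oceania) are treated as 60Hz regions; anything in between defaults to
// 50Hz. If local time cannot be determined, 60Hz is assumed.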
14104 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14105 return true;
14106 else
14107 return false;
14108}
14109
14110/*===========================================================================
14111 * FUNCTION : adjustBlackLevelForCFA
14112 *
14113 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14114 * of bayer CFA (Color Filter Array).
14115 *
14116 * PARAMETERS : @input: black level pattern in the order of RGGB
14117 * @output: black level pattern in the order of CFA
14118 * @color_arrangement: CFA color arrangement
14119 *
14120 * RETURN : None
14121 *==========================================================================*/
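// Example (illustrative values): for a GRBG sensor, an RGGB-ordered input {R, Gr, Gb, B} is
// reordered to {Gr, R, B, Gb} so each entry matches the CFA position it describes.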
14122template<typename T>
14123void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14124 T input[BLACK_LEVEL_PATTERN_CNT],
14125 T output[BLACK_LEVEL_PATTERN_CNT],
14126 cam_color_filter_arrangement_t color_arrangement)
14127{
14128 switch (color_arrangement) {
14129 case CAM_FILTER_ARRANGEMENT_GRBG:
14130 output[0] = input[1];
14131 output[1] = input[0];
14132 output[2] = input[3];
14133 output[3] = input[2];
14134 break;
14135 case CAM_FILTER_ARRANGEMENT_GBRG:
14136 output[0] = input[2];
14137 output[1] = input[3];
14138 output[2] = input[0];
14139 output[3] = input[1];
14140 break;
14141 case CAM_FILTER_ARRANGEMENT_BGGR:
14142 output[0] = input[3];
14143 output[1] = input[2];
14144 output[2] = input[1];
14145 output[3] = input[0];
14146 break;
14147 case CAM_FILTER_ARRANGEMENT_RGGB:
14148 output[0] = input[0];
14149 output[1] = input[1];
14150 output[2] = input[2];
14151 output[3] = input[3];
14152 break;
14153 default:
14154 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14155 break;
14156 }
14157}
14158
14159void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14160 CameraMetadata &resultMetadata,
14161 std::shared_ptr<metadata_buffer_t> settings)
14162{
14163 if (settings == nullptr) {
14164 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14165 return;
14166 }
14167
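// Copy the JPEG-related settings (GPS, orientation, quality, thumbnail) and the capture intent
// from the HDR+ request's HAL settings into the framework result metadata, which would
// otherwise reflect the ZSL buffer rather than the still-capture request.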
14168 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14169 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14170 }
14171
14172 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14173 String8 str((const char *)gps_methods);
14174 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14175 }
14176
14177 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14178 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14179 }
14180
14181 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14182 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14183 }
14184
14185 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14186 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14187 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14188 }
14189
14190 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14191 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14192 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14193 }
14194
14195 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14196 int32_t fwk_thumb_size[2];
14197 fwk_thumb_size[0] = thumb_size->width;
14198 fwk_thumb_size[1] = thumb_size->height;
14199 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14200 }
14201
14202 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14203 uint8_t fwk_intent = intent[0];
14204 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14205 }
14206}
14207
14208bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14209 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14210 const CameraMetadata &metadata)
14211{
14212 if (hdrPlusRequest == nullptr) return false;
14213
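// A request is treated as an HDR+ still capture only if it asks for high-quality noise
// reduction and edge enhancement and has exactly one JPEG (BLOB) output; anything else is left
// to the regular capture path.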
14214 // Check noise reduction mode is high quality.
14215 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14216 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14217 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14218 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14219 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
14220 return false;
14221 }
14222
14223 // Check edge mode is high quality.
14224 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14225 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14226 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14227 return false;
14228 }
14229
14230 if (request.num_output_buffers != 1 ||
14231 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14232 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
14233 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14234 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14235 request.output_buffers[i].stream->width,
14236 request.output_buffers[i].stream->height,
14237 request.output_buffers[i].stream->format);
14238 }
14239 return false;
14240 }
14241
14242 // Get a YUV buffer from pic channel.
14243 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14244 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14245 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14246 if (res != OK) {
14247 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14248 __FUNCTION__, strerror(-res), res);
14249 return false;
14250 }
14251
14252 pbcamera::StreamBuffer buffer;
14253 buffer.streamId = kPbYuvOutputStreamId;
14254 buffer.dmaBufFd = yuvBuffer->fd;
14255 buffer.data = yuvBuffer->buffer;
14256 buffer.dataSize = yuvBuffer->frame_len;
14257
14258 pbcamera::CaptureRequest pbRequest;
14259 pbRequest.id = request.frame_number;
14260 pbRequest.outputBuffers.push_back(buffer);
14261
14262 // Submit an HDR+ capture request to HDR+ service.
14263 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
14264 if (res != OK) {
14265 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14266 strerror(-res), res);
14267 return false;
14268 }
14269
14270 hdrPlusRequest->yuvBuffer = yuvBuffer;
14271 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14272
14273 return true;
14274}
14275
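// Kicks off an asynchronous HDR+ client open on the Easel manager client; onOpened() or
// onOpenFailed() below is called back once the open completes.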
14276status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14277 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14278 return OK;
14279 }
14280
14281 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14282 if (res != OK) {
14283 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14284 strerror(-res), res);
14285 return res;
14286 }
14287 gHdrPlusClientOpening = true;
14288
14289 return OK;
14290}
14291
14292status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14293{
14294 status_t res;
14295
14296 // Check if gHdrPlusClient is opened or being opened.
14297 if (gHdrPlusClient == nullptr) {
14298 if (gHdrPlusClientOpening) {
14299 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14300 return OK;
14301 }
14302
14303 res = openHdrPlusClientAsyncLocked();
14304 if (res != OK) {
14305 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14306 strerror(-res), res);
14307 return res;
14308 }
14309
14310 // When opening HDR+ client completes, HDR+ mode will be enabled.
14311 return OK;
14312
14313 }
14314
14315 // Configure stream for HDR+.
14316 res = configureHdrPlusStreamsLocked();
14317 if (res != OK) {
14318 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14319 return res;
14320 }
14321
14322 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14323 res = gHdrPlusClient->setZslHdrPlusMode(true);
14324 if (res != OK) {
14325 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14326 return res;
14327 }
14328
14329 mHdrPlusModeEnabled = true;
14330 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14331
14332 return OK;
14333}
14334
14335void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14336{
14337 // Disable HDR+ mode.
14338 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14339 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14340 if (res != OK) {
14341 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14342 }
14343 }
14344
14345 mHdrPlusModeEnabled = false;
14346 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14347}
14348
14349status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
14350{
14351 pbcamera::InputConfiguration inputConfig;
14352 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14353 status_t res = OK;
14354
14355 // Configure HDR+ client streams.
14356 // Get input config.
14357 if (mHdrPlusRawSrcChannel) {
14358 // HDR+ input buffers will be provided by HAL.
14359 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14360 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14361 if (res != OK) {
14362 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14363 __FUNCTION__, strerror(-res), res);
14364 return res;
14365 }
14366
14367 inputConfig.isSensorInput = false;
14368 } else {
14369 // Sensor MIPI will send data to Easel.
14370 inputConfig.isSensorInput = true;
14371 inputConfig.sensorMode.cameraId = mCameraId;
14372 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14373 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14374 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14375 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14376 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14377 if (mSensorModeInfo.num_raw_bits != 10) {
14378 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14379 mSensorModeInfo.num_raw_bits);
14380 return BAD_VALUE;
14381 }
14382
14383 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
14384 }
14385
14386 // Get output configurations.
14387 // Easel may need to output RAW16 buffers if mRawChannel was created.
14388 // TODO: handle RAW16 outputs.
14389
14390 // Easel may need to output YUV output buffers if mPictureChannel was created.
14391 pbcamera::StreamConfiguration yuvOutputConfig;
14392 if (mPictureChannel != nullptr) {
14393 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14394 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14395 if (res != OK) {
14396 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14397 __FUNCTION__, strerror(-res), res);
14398
14399 return res;
14400 }
14401
14402 outputStreamConfigs.push_back(yuvOutputConfig);
14403 }
14404
14405 // TODO: consider other channels for YUV output buffers.
14406
14407 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14408 if (res != OK) {
14409 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14410 strerror(-res), res);
14411 return res;
14412 }
14413
14414 return OK;
14415}
14416
14417void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14418 if (client == nullptr) {
14419 ALOGE("%s: Opened client is null.", __FUNCTION__);
14420 return;
14421 }
14422
14423 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14424
14425 Mutex::Autolock l(gHdrPlusClientLock);
14426 gHdrPlusClient = std::move(client);
14427 gHdrPlusClientOpening = false;
14428
14429 // Set static metadata.
14430 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14431 if (res != OK) {
14432 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14433 __FUNCTION__, strerror(-res), res);
14434 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14435 gHdrPlusClient = nullptr;
14436 return;
14437 }
14438
14439 // Enable HDR+ mode.
14440 res = enableHdrPlusModeLocked();
14441 if (res != OK) {
14442 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14443 }
14444}
14445
14446void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14447 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14448 Mutex::Autolock l(gHdrPlusClientLock);
14449 gHdrPlusClientOpening = false;
14450}
14451
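// HDR+ capture result callback: validates that exactly one YUV output is present, merges the
// pending request's JPEG settings into the result metadata, returns the YUV buffer to the pic
// channel for JPEG encoding, delivers the metadata to the framework, and drops the pending entry.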
14452void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14453 const camera_metadata_t &resultMetadata) {
14454 if (result != nullptr) {
14455 if (result->outputBuffers.size() != 1) {
14456 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14457 result->outputBuffers.size());
14458 return;
14459 }
14460
14461 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14462 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14463 result->outputBuffers[0].streamId);
14464 return;
14465 }
14466
14467 // Find the pending HDR+ request.
14468 HdrPlusPendingRequest pendingRequest;
14469 {
14470 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14471 auto req = mHdrPlusPendingRequests.find(result->requestId);
14472 pendingRequest = req->second;
14473 }
14474
14475 // Update the result metadata with the settings of the HDR+ still capture request because
14476 // the result metadata belongs to a ZSL buffer.
14477 CameraMetadata metadata;
14478 metadata = &resultMetadata;
14479 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14480 camera_metadata_t* updatedResultMetadata = metadata.release();
14481
14482 QCamera3PicChannel *picChannel =
14483 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14484
14485 // Check if dumping HDR+ YUV output is enabled.
14486 char prop[PROPERTY_VALUE_MAX];
14487 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14488 bool dumpYuvOutput = atoi(prop);
14489
14490 if (dumpYuvOutput) {
14491 // Dump yuv buffer to a ppm file.
14492 pbcamera::StreamConfiguration outputConfig;
14493 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14494 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14495 if (rc == OK) {
14496 char buf[FILENAME_MAX] = {};
14497 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14498 result->requestId, result->outputBuffers[0].streamId,
14499 outputConfig.image.width, outputConfig.image.height);
14500
14501 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14502 } else {
14503 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14504 __FUNCTION__, strerror(-rc), rc);
14505 }
14506 }
14507
14508 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14509 auto halMetadata = std::make_shared<metadata_buffer_t>();
14510 clear_metadata_buffer(halMetadata.get());
14511
14512 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14513 // encoding.
14514 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14515 halStreamId, /*minFrameDuration*/0);
14516 if (res == OK) {
14517 // Return the buffer to pic channel for encoding.
14518 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14519 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14520 halMetadata);
14521 } else {
14522 // Return the buffer without encoding.
14523 // TODO: This should not happen but we may want to report an error buffer to camera
14524 // service.
14525 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14526 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14527 strerror(-res), res);
14528 }
14529
14530 // Send HDR+ metadata to framework.
14531 {
14532 pthread_mutex_lock(&mMutex);
14533
14534 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14535 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14536 pthread_mutex_unlock(&mMutex);
14537 }
14538
14539 // Remove the HDR+ pending request.
14540 {
14541 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14542 auto req = mHdrPlusPendingRequests.find(result->requestId);
14543 mHdrPlusPendingRequests.erase(req);
14544 }
14545 }
14546}
14547
14548void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14549 // TODO: Handle HDR+ capture failures and send the failure to framework.
14550 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14551 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14552
14553 // Return the buffer to pic channel.
14554 QCamera3PicChannel *picChannel =
14555 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14556 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14557
14558 mHdrPlusPendingRequests.erase(pendingRequest);
14559}
14560
14561}; //end namespace qcamera