/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6
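
// Illustrative sketch (not part of this HAL): the face-rect and landmark indices above
// describe the per-face layout of the flat int32 arrays reported to the framework; for
// ANDROID_STATISTICS_FACE_LANDMARKS each detected face contributes TOTAL_LANDMARK_INDICES
// consecutive entries. A hypothetical packing helper could look like:
//
//     static void packFaceLandmarks(int32_t *dst, size_t faceIdx,
//             const cam_face_landmarks_info_t &landmarks) {
//         int32_t *entry = dst + faceIdx * TOTAL_LANDMARK_INDICES;
//         entry[LEFT_EYE_X]  = landmarks.left_eye_center.x;
//         entry[LEFT_EYE_Y]  = landmarks.left_eye_center.y;
//         entry[RIGHT_EYE_X] = landmarks.right_eye_center.x;
//         entry[RIGHT_EYE_Y] = landmarks.right_eye_center.y;
//         entry[MOUTH_X]     = landmarks.mouth_center.x;
//         entry[MOUTH_Y]     = landmarks.mouth_center.y;
//     }
//
// The helper name and the cam_face_landmarks_info_t field names are assumptions for
// illustration only; the HAL's actual face-data conversion code may differ.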

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the code
 * traverses from lower to higher index, which means that for HAL values that map to
 * different Android values, the traversal logic selects the first one found (see the
 * illustrative lookup sketch after this table).
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
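
// Illustrative sketch (not part of this HAL): the QCameraMap tables above are scanned
// linearly (METADATA_MAP_SIZE() gives the entry count), and the first match wins, which
// is why the ordering note above matters for HAL values that appear more than once.
// A hypothetical first-match HAL-to-framework lookup could look like:
//
//     template <typename fwkType, typename halType>
//     static int32_t lookupFwkValue(
//             const QCamera3HardwareInterface::QCameraMap<fwkType, halType> *map,
//             size_t mapSize, halType halValue, fwkType *fwkValue) {
//         for (size_t i = 0; i < mapSize; i++) {
//             if (map[i].hal_name == halValue) {   // first match wins
//                 *fwkValue = map[i].fwk_name;
//                 return 0;
//             }
//         }
//         return -1;                               // no mapping found
//     }
//
//     // e.g. lookupFwkValue(REFERENCE_ILLUMINANT_MAP,
//     //          METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP), CAM_AWB_D50, &illuminant);
//
// The helper name and the fwk_name/hal_name member names are assumptions for illustration;
// the HAL's own lookup helpers may differ.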

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
1306                /* We could potentially break here to enforce that a ZSL stream
1307                 * set by the framework is always the full active array size,
1308                 * but it is not clear from the spec whether the framework will
1309                 * always follow that. We also have logic to override to the full
1310                 * array size, so keep the check lenient for now.
1311 */
1312 }
1313 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1314 MAX_SIZES_CNT);
1315 for (size_t i = 0; i < count; i++) {
1316 if (((int32_t)rotatedWidth ==
1317 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1318 ((int32_t)rotatedHeight ==
1319 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1320 sizeFound = true;
1321 break;
1322 }
1323 }
1324 break;
1325 } /* End of switch(newStream->format) */
1326
1327 /* We error out even if a single stream has unsupported size set */
1328 if (!sizeFound) {
1329 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1330 rotatedWidth, rotatedHeight, newStream->format,
1331 gCamCapability[mCameraId]->active_array_size.width,
1332 gCamCapability[mCameraId]->active_array_size.height);
1333 rc = -EINVAL;
1334 break;
1335 }
1336 } /* End of for each stream */
1337 return rc;
1338}
1339
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001340/*===========================================================================
1341 * FUNCTION : validateUsageFlags
1342 *
1343 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1344 *
1345 * PARAMETERS :
1346 * @stream_list : streams to be configured
1347 *
1348 * RETURN :
1349 * NO_ERROR if the usage flags are supported
1350 * error code if usage flags are not supported
1351 *
1352 *==========================================================================*/
1353int QCamera3HardwareInterface::validateUsageFlags(
1354 const camera3_stream_configuration_t* streamList)
1355{
1356 for (size_t j = 0; j < streamList->num_streams; j++) {
1357 const camera3_stream_t *newStream = streamList->streams[j];
1358
1359 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1360 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1361 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1362 continue;
1363 }
1364
Jason Leec4cf5032017-05-24 18:31:41 -07001365 // Here we only care whether it's EIS3 or not
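        // (persist.camera.is_type selects the IS mode; any value other than
        // IS_TYPE_EIS_3_0 is treated as IS_TYPE_NONE for this format check)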
1366 char is_type_value[PROPERTY_VALUE_MAX];
1367 property_get("persist.camera.is_type", is_type_value, "4");
1368 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1369 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1370 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1371 isType = IS_TYPE_NONE;
1372
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001373 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1374 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1375 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1376 bool forcePreviewUBWC = true;
1377 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1378 forcePreviewUBWC = false;
1379 }
1380 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001381 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001382 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001383 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001384 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001385 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001386
1387 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1388 // So color spaces will always match.
1389
1390 // Check whether underlying formats of shared streams match.
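        // e.g. a stream flagged with both PREVIEW and VIDEO usage is backed by a
        // single set of buffers, so the default preview and video formats chosen
        // for this size must resolve to the same cam_format_t.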
1391 if (isVideo && isPreview && videoFormat != previewFormat) {
1392 LOGE("Combined video and preview usage flag is not supported");
1393 return -EINVAL;
1394 }
1395 if (isPreview && isZSL && previewFormat != zslFormat) {
1396 LOGE("Combined preview and zsl usage flag is not supported");
1397 return -EINVAL;
1398 }
1399 if (isVideo && isZSL && videoFormat != zslFormat) {
1400 LOGE("Combined video and zsl usage flag is not supported");
1401 return -EINVAL;
1402 }
1403 }
1404 return NO_ERROR;
1405}
1406
1407/*===========================================================================
1408 * FUNCTION : validateUsageFlagsForEis
1409 *
1410 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1411 *
1412 * PARAMETERS :
1413 * @stream_list : streams to be configured
1414 *
1415 * RETURN :
1416 * NO_ERROR if the usage flags are supported
1417 * error code if usage flags are not supported
1418 *
1419 *==========================================================================*/
1420int QCamera3HardwareInterface::validateUsageFlagsForEis(
1421 const camera3_stream_configuration_t* streamList)
1422{
1423 for (size_t j = 0; j < streamList->num_streams; j++) {
1424 const camera3_stream_t *newStream = streamList->streams[j];
1425
1426 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1427 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1428
1429        // Because EIS is "hard-coded" for certain use cases, and the current
1430        // implementation doesn't support sharing preview and video on the same
1431        // stream, return failure if EIS is forced on.
1432 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1433 LOGE("Combined video and preview usage flag is not supported due to EIS");
1434 return -EINVAL;
1435 }
1436 }
1437 return NO_ERROR;
1438}
1439
Thierry Strudel3d639192016-09-09 11:52:26 -07001440/*==============================================================================
1441 * FUNCTION : isSupportChannelNeeded
1442 *
1443 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1444 *
1445 * PARAMETERS :
1446 * @stream_list : streams to be configured
1447 * @stream_config_info : the config info for streams to be configured
1448 *
1449 * RETURN : Boolean true/false decision
1450 *
1451 *==========================================================================*/
1452bool QCamera3HardwareInterface::isSupportChannelNeeded(
1453 camera3_stream_configuration_t *streamList,
1454 cam_stream_size_info_t stream_config_info)
1455{
1456 uint32_t i;
1457 bool pprocRequested = false;
1458    /* Check for conditions where PProc pipeline does not have any streams */
1459 for (i = 0; i < stream_config_info.num_streams; i++) {
1460 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1461 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1462 pprocRequested = true;
1463 break;
1464 }
1465 }
1466
1467 if (pprocRequested == false )
1468 return true;
1469
1470 /* Dummy stream needed if only raw or jpeg streams present */
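    // e.g. a configuration containing only RAW and/or BLOB streams falls through
    // the switch below without returning, so a support channel is reported as needed.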
1471 for (i = 0; i < streamList->num_streams; i++) {
1472 switch(streamList->streams[i]->format) {
1473 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1474 case HAL_PIXEL_FORMAT_RAW10:
1475 case HAL_PIXEL_FORMAT_RAW16:
1476 case HAL_PIXEL_FORMAT_BLOB:
1477 break;
1478 default:
1479 return false;
1480 }
1481 }
1482 return true;
1483}
1484
1485/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001488 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 *
1490 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * RETURN : int32_t type of status
1494 * NO_ERROR -- success
1495 * non-zero failure code
1496 *
1497 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001498int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001499{
1500 int32_t rc = NO_ERROR;
1501
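    // Compute the largest width/height across all configured streams and pass it
    // down via CAM_INTF_PARM_MAX_DIMENSION so the backend can pick a matching
    // sensor mode before CAM_INTF_PARM_SENSOR_MODE_INFO is queried.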
1502 cam_dimension_t max_dim = {0, 0};
1503 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1504 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1505 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1506 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1507 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1508 }
1509
1510 clear_metadata_buffer(mParameters);
1511
1512 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1513 max_dim);
1514 if (rc != NO_ERROR) {
1515 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1516 return rc;
1517 }
1518
1519 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1520 if (rc != NO_ERROR) {
1521 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1522 return rc;
1523 }
1524
1525 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001527
1528 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1529 mParameters);
1530 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001531 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001532 return rc;
1533 }
1534
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001535 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001536 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1537 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1538 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1539 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1540 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001541
1542 return rc;
1543}
1544
1545/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001546 * FUNCTION : getCurrentSensorModeInfo
1547 *
1548 * DESCRIPTION: Get sensor mode information that is currently selected.
1549 *
1550 * PARAMETERS :
1551 * @sensorModeInfo : sensor mode information (output)
1552 *
1553 * RETURN : int32_t type of status
1554 * NO_ERROR -- success
1555 * non-zero failure code
1556 *
1557 *==========================================================================*/
1558int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1559{
1560 int32_t rc = NO_ERROR;
1561
1562 clear_metadata_buffer(mParameters);
1563 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1564
1565 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1566 mParameters);
1567 if (rc != NO_ERROR) {
1568        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1569 return rc;
1570 }
1571
1572 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1573 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1574 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1575 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1576 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1577 sensorModeInfo.num_raw_bits);
1578
1579 return rc;
1580}
1581
1582/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001583 * FUNCTION : addToPPFeatureMask
1584 *
1585 * DESCRIPTION: add additional features to pp feature mask based on
1586 * stream type and usecase
1587 *
1588 * PARAMETERS :
1589 * @stream_format : stream type for feature mask
1590 * @stream_idx : stream idx within postprocess_mask list to change
1591 *
1592 * RETURN : None
1593 *
1594 *==========================================================================*/
1595void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1596 uint32_t stream_idx)
1597{
1598 char feature_mask_value[PROPERTY_VALUE_MAX];
1599 cam_feature_mask_t feature_mask;
1600 int args_converted;
1601 int property_len;
1602
1603 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001604#ifdef _LE_CAMERA_
1605 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1606 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1607 property_len = property_get("persist.camera.hal3.feature",
1608 feature_mask_value, swtnr_feature_mask_value);
1609#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001612#endif
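    // e.g. "0x10" is parsed as hex and "16" as decimal below; both yield the same
    // mask (illustrative values only, not tied to a specific feature bit).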
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1614 (feature_mask_value[1] == 'x')) {
1615 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1616 } else {
1617 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1618 }
1619 if (1 != args_converted) {
1620 feature_mask = 0;
1621 LOGE("Wrong feature mask %s", feature_mask_value);
1622 return;
1623 }
1624
1625 switch (stream_format) {
1626 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1627 /* Add LLVD to pp feature mask only if video hint is enabled */
1628 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1629 mStreamConfigInfo.postprocess_mask[stream_idx]
1630 |= CAM_QTI_FEATURE_SW_TNR;
1631 LOGH("Added SW TNR to pp feature mask");
1632 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1633 mStreamConfigInfo.postprocess_mask[stream_idx]
1634 |= CAM_QCOM_FEATURE_LLVD;
1635 LOGH("Added LLVD SeeMore to pp feature mask");
1636 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001637 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1638 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1639 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1640 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001641 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1642 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1643 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1644 CAM_QTI_FEATURE_BINNING_CORRECTION;
1645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001646 break;
1647 }
1648 default:
1649 break;
1650 }
1651 LOGD("PP feature mask %llx",
1652 mStreamConfigInfo.postprocess_mask[stream_idx]);
1653}
1654
1655/*==============================================================================
1656 * FUNCTION : updateFpsInPreviewBuffer
1657 *
1658 * DESCRIPTION: update FPS information in preview buffer.
1659 *
1660 * PARAMETERS :
1661 * @metadata : pointer to metadata buffer
1662 * @frame_number: frame_number to look for in pending buffer list
1663 *
1664 * RETURN : None
1665 *
1666 *==========================================================================*/
1667void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1668 uint32_t frame_number)
1669{
1670 // Mark all pending buffers for this particular request
1671 // with corresponding framerate information
1672 for (List<PendingBuffersInRequest>::iterator req =
1673 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1674 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1675 for(List<PendingBufferInfo>::iterator j =
1676 req->mPendingBufferList.begin();
1677 j != req->mPendingBufferList.end(); j++) {
1678 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1679 if ((req->frame_number == frame_number) &&
1680 (channel->getStreamTypeMask() &
1681 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1682 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1683 CAM_INTF_PARM_FPS_RANGE, metadata) {
1684 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1685 struct private_handle_t *priv_handle =
1686 (struct private_handle_t *)(*(j->buffer));
1687 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1688 }
1689 }
1690 }
1691 }
1692}
1693
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001694/*==============================================================================
1695 * FUNCTION : updateTimeStampInPendingBuffers
1696 *
1697 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1698 * of a frame number
1699 *
1700 * PARAMETERS :
1701 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1702 * @timestamp : timestamp to be set
1703 *
1704 * RETURN : None
1705 *
1706 *==========================================================================*/
1707void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1708 uint32_t frameNumber, nsecs_t timestamp)
1709{
1710 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1711 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1712 if (req->frame_number != frameNumber)
1713 continue;
1714
1715 for (auto k = req->mPendingBufferList.begin();
1716 k != req->mPendingBufferList.end(); k++ ) {
1717 struct private_handle_t *priv_handle =
1718 (struct private_handle_t *) (*(k->buffer));
1719 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1720 }
1721 }
1722 return;
1723}
1724
Thierry Strudel3d639192016-09-09 11:52:26 -07001725/*===========================================================================
1726 * FUNCTION : configureStreams
1727 *
1728 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1729 * and output streams.
1730 *
1731 * PARAMETERS :
1732 * @stream_list : streams to be configured
1733 *
1734 * RETURN :
1735 *
1736 *==========================================================================*/
1737int QCamera3HardwareInterface::configureStreams(
1738 camera3_stream_configuration_t *streamList)
1739{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001740 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001741 int rc = 0;
1742
1743 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001744 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001745 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001746 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001747
1748 return rc;
1749}
1750
1751/*===========================================================================
1752 * FUNCTION : configureStreamsPerfLocked
1753 *
1754 * DESCRIPTION: configureStreams while perfLock is held.
1755 *
1756 * PARAMETERS :
1757 * @stream_list : streams to be configured
1758 *
1759 * RETURN : int32_t type of status
1760 * NO_ERROR -- success
1761 * non-zero failure code
1762 *==========================================================================*/
1763int QCamera3HardwareInterface::configureStreamsPerfLocked(
1764 camera3_stream_configuration_t *streamList)
1765{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001766 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001767 int rc = 0;
1768
1769 // Sanity check stream_list
1770 if (streamList == NULL) {
1771 LOGE("NULL stream configuration");
1772 return BAD_VALUE;
1773 }
1774 if (streamList->streams == NULL) {
1775 LOGE("NULL stream list");
1776 return BAD_VALUE;
1777 }
1778
1779 if (streamList->num_streams < 1) {
1780 LOGE("Bad number of streams requested: %d",
1781 streamList->num_streams);
1782 return BAD_VALUE;
1783 }
1784
1785 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1786 LOGE("Maximum number of streams %d exceeded: %d",
1787 MAX_NUM_STREAMS, streamList->num_streams);
1788 return BAD_VALUE;
1789 }
1790
Jason Leec4cf5032017-05-24 18:31:41 -07001791 mOpMode = streamList->operation_mode;
1792 LOGD("mOpMode: %d", mOpMode);
1793
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001794 rc = validateUsageFlags(streamList);
1795 if (rc != NO_ERROR) {
1796 return rc;
1797 }
1798
Thierry Strudel3d639192016-09-09 11:52:26 -07001799    /* First invalidate all the streams in mStreamInfo;
1800     * if they appear again, they will be validated */
1801 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1802 it != mStreamInfo.end(); it++) {
1803 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1804 if (channel) {
1805 channel->stop();
1806 }
1807 (*it)->status = INVALID;
1808 }
1809
1810 if (mRawDumpChannel) {
1811 mRawDumpChannel->stop();
1812 delete mRawDumpChannel;
1813 mRawDumpChannel = NULL;
1814 }
1815
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001816 if (mHdrPlusRawSrcChannel) {
1817 mHdrPlusRawSrcChannel->stop();
1818 delete mHdrPlusRawSrcChannel;
1819 mHdrPlusRawSrcChannel = NULL;
1820 }
1821
Thierry Strudel3d639192016-09-09 11:52:26 -07001822 if (mSupportChannel)
1823 mSupportChannel->stop();
1824
1825 if (mAnalysisChannel) {
1826 mAnalysisChannel->stop();
1827 }
1828 if (mMetadataChannel) {
1829         /* If mStreamInfo is not empty, a metadata stream exists */
1830 mMetadataChannel->stop();
1831 }
1832 if (mChannelHandle) {
1833 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001834 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 LOGD("stopping channel %d", mChannelHandle);
1836 }
1837
1838 pthread_mutex_lock(&mMutex);
1839
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001840 mPictureChannel = NULL;
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 // Check state
1843 switch (mState) {
1844 case INITIALIZED:
1845 case CONFIGURED:
1846 case STARTED:
1847 /* valid state */
1848 break;
1849 default:
1850 LOGE("Invalid state %d", mState);
1851 pthread_mutex_unlock(&mMutex);
1852 return -ENODEV;
1853 }
1854
1855 /* Check whether we have video stream */
1856 m_bIs4KVideo = false;
1857 m_bIsVideo = false;
1858 m_bEisSupportedSize = false;
1859 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001860 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001862 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001863 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 uint32_t videoWidth = 0U;
1865 uint32_t videoHeight = 0U;
1866 size_t rawStreamCnt = 0;
1867 size_t stallStreamCnt = 0;
1868 size_t processedStreamCnt = 0;
1869 // Number of streams on ISP encoder path
1870 size_t numStreamsOnEncoder = 0;
1871 size_t numYuv888OnEncoder = 0;
1872 bool bYuv888OverrideJpeg = false;
1873 cam_dimension_t largeYuv888Size = {0, 0};
1874 cam_dimension_t maxViewfinderSize = {0, 0};
1875 bool bJpegExceeds4K = false;
1876 bool bJpegOnEncoder = false;
1877 bool bUseCommonFeatureMask = false;
1878 cam_feature_mask_t commonFeatureMask = 0;
1879 bool bSmallJpegSize = false;
1880 uint32_t width_ratio;
1881 uint32_t height_ratio;
1882 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1883 camera3_stream_t *inputStream = NULL;
1884 bool isJpeg = false;
1885 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001886 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001887 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001888
1889 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1890
1891 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001892 uint8_t eis_prop_set;
1893 uint32_t maxEisWidth = 0;
1894 uint32_t maxEisHeight = 0;
1895
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001896 // Initialize all instant AEC related variables
1897 mInstantAEC = false;
1898 mResetInstantAEC = false;
1899 mInstantAECSettledFrameNumber = 0;
1900 mAecSkipDisplayFrameBound = 0;
1901 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001902 mCurrFeatureState = 0;
1903 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001904
Thierry Strudel3d639192016-09-09 11:52:26 -07001905 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1906
1907 size_t count = IS_TYPE_MAX;
1908 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1909 for (size_t i = 0; i < count; i++) {
1910 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001911 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1912 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 break;
1914 }
1915 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001916
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001917 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 maxEisWidth = MAX_EIS_WIDTH;
1919 maxEisHeight = MAX_EIS_HEIGHT;
1920 }
1921
1922 /* EIS setprop control */
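    // persist.camera.eis.enable defaults to "1" here; setting it to "0" disables
    // EIS even when the capability advertises EIS 2.0/3.0 support.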
1923 char eis_prop[PROPERTY_VALUE_MAX];
1924 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001925 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 eis_prop_set = (uint8_t)atoi(eis_prop);
1927
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001928 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1930
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001931 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1932 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001933
Thierry Strudel3d639192016-09-09 11:52:26 -07001934 /* stream configurations */
1935 for (size_t i = 0; i < streamList->num_streams; i++) {
1936 camera3_stream_t *newStream = streamList->streams[i];
1937 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1938 "height = %d, rotation = %d, usage = 0x%x",
1939 i, newStream->stream_type, newStream->format,
1940 newStream->width, newStream->height, newStream->rotation,
1941 newStream->usage);
1942 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1943 newStream->stream_type == CAMERA3_STREAM_INPUT){
1944 isZsl = true;
1945 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001946 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1947 IS_USAGE_PREVIEW(newStream->usage)) {
1948 isPreview = true;
1949 }
1950
Thierry Strudel3d639192016-09-09 11:52:26 -07001951 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1952 inputStream = newStream;
1953 }
1954
Emilian Peev7650c122017-01-19 08:24:33 -08001955 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1956 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001957 isJpeg = true;
1958 jpegSize.width = newStream->width;
1959 jpegSize.height = newStream->height;
1960 if (newStream->width > VIDEO_4K_WIDTH ||
1961 newStream->height > VIDEO_4K_HEIGHT)
1962 bJpegExceeds4K = true;
1963 }
1964
1965 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1966 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1967 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001968 // In HAL3 we can have multiple different video streams.
1969            // The variables videoWidth and videoHeight below hold the
1970            // dimensions of the largest of them.
1971 if (videoWidth < newStream->width ||
1972 videoHeight < newStream->height) {
1973 videoWidth = newStream->width;
1974 videoHeight = newStream->height;
1975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1977 (VIDEO_4K_HEIGHT <= newStream->height)) {
1978 m_bIs4KVideo = true;
1979 }
1980 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1981 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001982
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 }
1984 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1985 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1986 switch (newStream->format) {
1987 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001988 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1989 depthPresent = true;
1990 break;
1991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 stallStreamCnt++;
1993 if (isOnEncoder(maxViewfinderSize, newStream->width,
1994 newStream->height)) {
1995 numStreamsOnEncoder++;
1996 bJpegOnEncoder = true;
1997 }
1998 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1999 newStream->width);
2000 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2001                        newStream->height);
2002 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2003 "FATAL: max_downscale_factor cannot be zero and so assert");
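                // If the JPEG is more than max_downscale_factor smaller than the
                // active array in either dimension, flag it so the snapshot stream
                // picks up the full post-processing superset mask further below.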
2004 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2005 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2006 LOGH("Setting small jpeg size flag to true");
2007 bSmallJpegSize = true;
2008 }
2009 break;
2010 case HAL_PIXEL_FORMAT_RAW10:
2011 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2012 case HAL_PIXEL_FORMAT_RAW16:
2013 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002014 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2015 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2016 pdStatCount++;
2017 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002018 break;
2019 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2020 processedStreamCnt++;
2021 if (isOnEncoder(maxViewfinderSize, newStream->width,
2022 newStream->height)) {
2023 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2024 !IS_USAGE_ZSL(newStream->usage)) {
2025 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2026 }
2027 numStreamsOnEncoder++;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2031 processedStreamCnt++;
2032 if (isOnEncoder(maxViewfinderSize, newStream->width,
2033 newStream->height)) {
2034 // If Yuv888 size is not greater than 4K, set feature mask
2035                    // to SUPERSET so that it supports concurrent requests on
2036 // YUV and JPEG.
2037 if (newStream->width <= VIDEO_4K_WIDTH &&
2038 newStream->height <= VIDEO_4K_HEIGHT) {
2039 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2040 }
2041 numStreamsOnEncoder++;
2042 numYuv888OnEncoder++;
2043 largeYuv888Size.width = newStream->width;
2044 largeYuv888Size.height = newStream->height;
2045 }
2046 break;
2047 default:
2048 processedStreamCnt++;
2049 if (isOnEncoder(maxViewfinderSize, newStream->width,
2050 newStream->height)) {
2051 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2052 numStreamsOnEncoder++;
2053 }
2054 break;
2055 }
2056
2057 }
2058 }
2059
2060 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2061 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2062 !m_bIsVideo) {
2063 m_bEisEnable = false;
2064 }
2065
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002066 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2067 pthread_mutex_unlock(&mMutex);
2068 return -EINVAL;
2069 }
2070
Thierry Strudel54dc9782017-02-15 12:12:10 -08002071 uint8_t forceEnableTnr = 0;
2072 char tnr_prop[PROPERTY_VALUE_MAX];
2073 memset(tnr_prop, 0, sizeof(tnr_prop));
2074 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2075 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2076
Thierry Strudel3d639192016-09-09 11:52:26 -07002077 /* Logic to enable/disable TNR based on specific config size/etc.*/
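    // TNR is turned on for non-HFR video sessions when the TNR setprops request
    // it, or unconditionally when debug.camera.tnr.forceenable is set.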
2078 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002079 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2080 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002081 else if (forceEnableTnr)
2082 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002083
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002084 char videoHdrProp[PROPERTY_VALUE_MAX];
2085 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2086 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2087 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2088
2089 if (hdr_mode_prop == 1 && m_bIsVideo &&
2090 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2091 m_bVideoHdrEnabled = true;
2092 else
2093 m_bVideoHdrEnabled = false;
2094
2095
Thierry Strudel3d639192016-09-09 11:52:26 -07002096 /* Check if num_streams is sane */
2097 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2098 rawStreamCnt > MAX_RAW_STREAMS ||
2099 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2100        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2101 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2102 pthread_mutex_unlock(&mMutex);
2103 return -EINVAL;
2104 }
2105 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002106 if (isZsl && m_bIs4KVideo) {
2107 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002108 pthread_mutex_unlock(&mMutex);
2109 return -EINVAL;
2110 }
2111 /* Check if stream sizes are sane */
2112 if (numStreamsOnEncoder > 2) {
2113 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2114 pthread_mutex_unlock(&mMutex);
2115 return -EINVAL;
2116 } else if (1 < numStreamsOnEncoder){
2117 bUseCommonFeatureMask = true;
2118 LOGH("Multiple streams above max viewfinder size, common mask needed");
2119 }
2120
2121 /* Check if BLOB size is greater than 4k in 4k recording case */
2122 if (m_bIs4KVideo && bJpegExceeds4K) {
2123 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2124 pthread_mutex_unlock(&mMutex);
2125 return -EINVAL;
2126 }
2127
Emilian Peev7650c122017-01-19 08:24:33 -08002128 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2129 depthPresent) {
2130 LOGE("HAL doesn't support depth streams in HFR mode!");
2131 pthread_mutex_unlock(&mMutex);
2132 return -EINVAL;
2133 }
2134
Thierry Strudel3d639192016-09-09 11:52:26 -07002135 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2136 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2137 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2138    // is not true. Otherwise testMandatoryOutputCombinations will fail with the following
2139 // configurations:
2140 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2141 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2142 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2143 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2144 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2145 __func__);
2146 pthread_mutex_unlock(&mMutex);
2147 return -EINVAL;
2148 }
2149
2150 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2151 // the YUV stream's size is greater or equal to the JPEG size, set common
2152 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2153 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2154 jpegSize.width, jpegSize.height) &&
2155 largeYuv888Size.width > jpegSize.width &&
2156 largeYuv888Size.height > jpegSize.height) {
2157 bYuv888OverrideJpeg = true;
2158 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2159 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2160 }
2161
2162 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2163 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2164 commonFeatureMask);
2165 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2166 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2167
2168 rc = validateStreamDimensions(streamList);
2169 if (rc == NO_ERROR) {
2170 rc = validateStreamRotations(streamList);
2171 }
2172 if (rc != NO_ERROR) {
2173 LOGE("Invalid stream configuration requested!");
2174 pthread_mutex_unlock(&mMutex);
2175 return rc;
2176 }
2177
Emilian Peev0f3c3162017-03-15 12:57:46 +00002178 if (1 < pdStatCount) {
2179 LOGE("HAL doesn't support multiple PD streams");
2180 pthread_mutex_unlock(&mMutex);
2181 return -EINVAL;
2182 }
2183
2184 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2185 (1 == pdStatCount)) {
2186 LOGE("HAL doesn't support PD streams in HFR mode!");
2187 pthread_mutex_unlock(&mMutex);
2188 return -EINVAL;
2189 }
2190
Thierry Strudel3d639192016-09-09 11:52:26 -07002191 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2192 for (size_t i = 0; i < streamList->num_streams; i++) {
2193 camera3_stream_t *newStream = streamList->streams[i];
2194 LOGH("newStream type = %d, stream format = %d "
2195 "stream size : %d x %d, stream rotation = %d",
2196 newStream->stream_type, newStream->format,
2197 newStream->width, newStream->height, newStream->rotation);
2198        //if the stream is already in mStreamInfo, validate it
2199 bool stream_exists = false;
2200 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2201 it != mStreamInfo.end(); it++) {
2202 if ((*it)->stream == newStream) {
2203 QCamera3ProcessingChannel *channel =
2204 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2205 stream_exists = true;
2206 if (channel)
2207 delete channel;
2208 (*it)->status = VALID;
2209 (*it)->stream->priv = NULL;
2210 (*it)->channel = NULL;
2211 }
2212 }
2213 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2214 //new stream
2215 stream_info_t* stream_info;
2216 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2217 if (!stream_info) {
2218 LOGE("Could not allocate stream info");
2219 rc = -ENOMEM;
2220 pthread_mutex_unlock(&mMutex);
2221 return rc;
2222 }
2223 stream_info->stream = newStream;
2224 stream_info->status = VALID;
2225 stream_info->channel = NULL;
2226 mStreamInfo.push_back(stream_info);
2227 }
2228 /* Covers Opaque ZSL and API1 F/W ZSL */
2229 if (IS_USAGE_ZSL(newStream->usage)
2230 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2231 if (zslStream != NULL) {
2232 LOGE("Multiple input/reprocess streams requested!");
2233 pthread_mutex_unlock(&mMutex);
2234 return BAD_VALUE;
2235 }
2236 zslStream = newStream;
2237 }
2238 /* Covers YUV reprocess */
2239 if (inputStream != NULL) {
2240 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2241 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2242 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2243 && inputStream->width == newStream->width
2244 && inputStream->height == newStream->height) {
2245 if (zslStream != NULL) {
2246                /* This scenario indicates that multiple YUV streams with the same
2247                 * size as the input stream have been requested. Since the zsl stream
2248                 * handle is used solely to override the size of streams that share
2249                 * h/w streams, we just make a guess here as to which stream is the
2250                 * ZSL stream. This will be refactored once we have generic logic for
2251                 * streams sharing encoder output.
2252 */
2253                LOGH("Warning, Multiple input/reprocess streams requested!");
2254 }
2255 zslStream = newStream;
2256 }
2257 }
2258 }
2259
2260 /* If a zsl stream is set, we know that we have configured at least one input or
2261 bidirectional stream */
2262 if (NULL != zslStream) {
2263 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2264 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2265 mInputStreamInfo.format = zslStream->format;
2266 mInputStreamInfo.usage = zslStream->usage;
2267 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2268 mInputStreamInfo.dim.width,
2269 mInputStreamInfo.dim.height,
2270 mInputStreamInfo.format, mInputStreamInfo.usage);
2271 }
2272
2273 cleanAndSortStreamInfo();
2274 if (mMetadataChannel) {
2275 delete mMetadataChannel;
2276 mMetadataChannel = NULL;
2277 }
2278 if (mSupportChannel) {
2279 delete mSupportChannel;
2280 mSupportChannel = NULL;
2281 }
2282
2283 if (mAnalysisChannel) {
2284 delete mAnalysisChannel;
2285 mAnalysisChannel = NULL;
2286 }
2287
2288 if (mDummyBatchChannel) {
2289 delete mDummyBatchChannel;
2290 mDummyBatchChannel = NULL;
2291 }
2292
Emilian Peev7650c122017-01-19 08:24:33 -08002293 if (mDepthChannel) {
2294 mDepthChannel = NULL;
2295 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002296 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002297
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002298 mShutterDispatcher.clear();
2299 mOutputBufferDispatcher.clear();
2300
Thierry Strudel2896d122017-02-23 19:18:03 -08002301 char is_type_value[PROPERTY_VALUE_MAX];
2302 property_get("persist.camera.is_type", is_type_value, "4");
2303 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2304
Binhao Line406f062017-05-03 14:39:44 -07002305 char property_value[PROPERTY_VALUE_MAX];
2306 property_get("persist.camera.gzoom.at", property_value, "0");
2307 int goog_zoom_at = atoi(property_value);
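    // persist.camera.gzoom.at is treated as a bitmask below: bit 0 enables the
    // CAM_QCOM_FEATURE_GOOG_ZOOM mask for video streams and bit 1 for preview
    // streams, both only on the back camera; persist.camera.gzoom.4k additionally
    // gates its use for 4K video.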
Jason Leec4cf5032017-05-24 18:31:41 -07002308 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2309 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2310 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2311 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002312
2313 property_get("persist.camera.gzoom.4k", property_value, "0");
2314 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2315
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 //Create metadata channel and initialize it
2317 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2318 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2319 gCamCapability[mCameraId]->color_arrangement);
2320 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2321 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002322 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 if (mMetadataChannel == NULL) {
2324 LOGE("failed to allocate metadata channel");
2325 rc = -ENOMEM;
2326 pthread_mutex_unlock(&mMutex);
2327 return rc;
2328 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002329 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002330 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2331 if (rc < 0) {
2332 LOGE("metadata channel initialization failed");
2333 delete mMetadataChannel;
2334 mMetadataChannel = NULL;
2335 pthread_mutex_unlock(&mMutex);
2336 return rc;
2337 }
2338
Thierry Strudel2896d122017-02-23 19:18:03 -08002339 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002341 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002342 // Keep track of preview/video streams indices.
2343 // There could be more than one preview streams, but only one video stream.
2344 int32_t video_stream_idx = -1;
2345 int32_t preview_stream_idx[streamList->num_streams];
2346 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002347 bool previewTnr[streamList->num_streams];
2348 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2349 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2350 // Loop through once to determine preview TNR conditions before creating channels.
2351 for (size_t i = 0; i < streamList->num_streams; i++) {
2352 camera3_stream_t *newStream = streamList->streams[i];
2353 uint32_t stream_usage = newStream->usage;
2354 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2355 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2356 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2357 video_stream_idx = (int32_t)i;
2358 else
2359 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2360 }
2361 }
2362 // By default, preview stream TNR is disabled.
2363    // Enable TNR for the preview stream if all conditions below are satisfied:
2364 // 1. preview resolution == video resolution.
2365 // 2. video stream TNR is enabled.
2366 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2367 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2368 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2369 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2370 if (m_bTnrEnabled && m_bTnrVideo &&
2371 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2372 video_stream->width == preview_stream->width &&
2373 video_stream->height == preview_stream->height) {
2374 previewTnr[preview_stream_idx[i]] = true;
2375 }
2376 }
2377
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2379 /* Allocate channel objects for the requested streams */
2380 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002381
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 camera3_stream_t *newStream = streamList->streams[i];
2383 uint32_t stream_usage = newStream->usage;
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2386 struct camera_info *p_info = NULL;
2387 pthread_mutex_lock(&gCamLock);
2388 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2389 pthread_mutex_unlock(&gCamLock);
2390 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2391 || IS_USAGE_ZSL(newStream->usage)) &&
2392 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002393 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002394 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002395 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2396 if (bUseCommonFeatureMask)
2397 zsl_ppmask = commonFeatureMask;
2398 else
2399 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 if (numStreamsOnEncoder > 0)
2402 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2403 else
2404 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002405 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 LOGH("Input stream configured, reprocess config");
2410 } else {
2411 //for non zsl streams find out the format
2412 switch (newStream->format) {
2413 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2414 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002415 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002416 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2417 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2418 /* add additional features to pp feature mask */
2419 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2420 mStreamConfigInfo.num_streams);
2421
2422 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_VIDEO;
2425 if (m_bTnrEnabled && m_bTnrVideo) {
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002432 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2434 CAM_QTI_FEATURE_PPEISCORE;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 } else {
2441 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2442 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002443 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2445 CAM_QCOM_FEATURE_CPP_TNR;
2446 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2448 ~CAM_QCOM_FEATURE_CDS;
2449 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002450 if(!m_bSwTnrPreview) {
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2452 ~CAM_QTI_FEATURE_SW_TNR;
2453 }
Binhao Line406f062017-05-03 14:39:44 -07002454 if (is_goog_zoom_preview_enabled) {
2455 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2456 CAM_QCOM_FEATURE_GOOG_ZOOM;
2457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002458 padding_info.width_padding = mSurfaceStridePadding;
2459 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002460 previewSize.width = (int32_t)newStream->width;
2461 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002462 }
2463 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2464 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2466 newStream->height;
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2468 newStream->width;
2469 }
2470 }
2471 break;
2472 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002473 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002474 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2475 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2476 if (bUseCommonFeatureMask)
2477 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2478 commonFeatureMask;
2479 else
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2481 CAM_QCOM_FEATURE_NONE;
2482 } else {
2483 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2484 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2485 }
2486 break;
2487 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002488 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2490 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2491 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2492 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2493 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 /* Remove rotation if it is not supported
2495 for 4K LiveVideo snapshot case (online processing) */
2496 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2497 CAM_QCOM_FEATURE_ROTATION)) {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2499 &= ~CAM_QCOM_FEATURE_ROTATION;
2500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002501 } else {
2502 if (bUseCommonFeatureMask &&
2503 isOnEncoder(maxViewfinderSize, newStream->width,
2504 newStream->height)) {
2505 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2506 } else {
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2508 }
2509 }
2510 if (isZsl) {
2511 if (zslStream) {
2512 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2513 (int32_t)zslStream->width;
2514 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2515 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002516 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2517 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 } else {
2519 LOGE("Error, No ZSL stream identified");
2520 pthread_mutex_unlock(&mMutex);
2521 return -EINVAL;
2522 }
2523 } else if (m_bIs4KVideo) {
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2525 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2526 } else if (bYuv888OverrideJpeg) {
2527 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2528 (int32_t)largeYuv888Size.width;
2529 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2530 (int32_t)largeYuv888Size.height;
2531 }
2532 break;
2533 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2534 case HAL_PIXEL_FORMAT_RAW16:
2535 case HAL_PIXEL_FORMAT_RAW10:
2536 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2538 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002539 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2540 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2541 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2542 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2543 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2544 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2545 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2546 gCamCapability[mCameraId]->dt[mPDIndex];
2547 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2548 gCamCapability[mCameraId]->vc[mPDIndex];
2549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 break;
2551 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2554 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2555 break;
2556 }
2557 }
2558
2559 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2560 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2561 gCamCapability[mCameraId]->color_arrangement);
2562
2563 if (newStream->priv == NULL) {
2564 //New stream, construct channel
2565 switch (newStream->stream_type) {
2566 case CAMERA3_STREAM_INPUT:
2567 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2568 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2569 break;
2570 case CAMERA3_STREAM_BIDIRECTIONAL:
2571 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2572 GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 case CAMERA3_STREAM_OUTPUT:
2575 /* For video encoding stream, set read/write rarely
2576                 * flags so that the buffers may be allocated un-cached */
2577 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2578 newStream->usage |=
2579 (GRALLOC_USAGE_SW_READ_RARELY |
2580 GRALLOC_USAGE_SW_WRITE_RARELY |
2581 GRALLOC_USAGE_HW_CAMERA_WRITE);
2582 else if (IS_USAGE_ZSL(newStream->usage))
2583 {
2584 LOGD("ZSL usage flag skipping");
2585 }
2586 else if (newStream == zslStream
2587 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2589 } else
2590 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2591 break;
2592 default:
2593 LOGE("Invalid stream_type %d", newStream->stream_type);
2594 break;
2595 }
2596
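// UBWC is assumed on for the new channel by default; it is switched off further
// below when preview and video share the same dimensions (so CPP can duplicate
// buffers) or when goog_zoom post-processing is linked to the stream.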
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002597 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002598 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2599 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2600 QCamera3ProcessingChannel *channel = NULL;
2601 switch (newStream->format) {
2602 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2603 if ((newStream->usage &
2604 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2605 (streamList->operation_mode ==
2606 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2607 ) {
2608 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2609 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002610 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002611 this,
2612 newStream,
2613 (cam_stream_type_t)
2614 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2615 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2616 mMetadataChannel,
2617 0); //heap buffers are not required for HFR video channel
2618 if (channel == NULL) {
2619 LOGE("allocation of channel failed");
2620 pthread_mutex_unlock(&mMutex);
2621 return -ENOMEM;
2622 }
2623 //channel->getNumBuffers() will return 0 here so use
2624 //MAX_INFLIGHT_HFR_REQUESTS
2625 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2626 newStream->priv = channel;
2627 LOGI("num video buffers in HFR mode: %d",
2628 MAX_INFLIGHT_HFR_REQUESTS);
2629 } else {
2630 /* Copy the stream contents in the HFR preview-only case to create a
2631 * dummy batch channel so that sensor streaming stays in
2632 * HFR mode */
2633 if (!m_bIsVideo && (streamList->operation_mode ==
2634 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2635 mDummyBatchStream = *newStream;
2636 }
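// Default to the inflight-request buffer budget; EIS 3.0 video streams use a
// video-specific count instead, further reduced for 4K which runs at <= 30fps.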
Thierry Strudel2896d122017-02-23 19:18:03 -08002637 int bufferCount = MAX_INFLIGHT_REQUESTS;
2638 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2639 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002640 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2641 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2642 bufferCount = m_bIs4KVideo ?
2643 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2644 }
2645
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002647 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2648 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002649 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 this,
2651 newStream,
2652 (cam_stream_type_t)
2653 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2654 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2655 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002656 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002657 if (channel == NULL) {
2658 LOGE("allocation of channel failed");
2659 pthread_mutex_unlock(&mMutex);
2660 return -ENOMEM;
2661 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002662 /* disable UBWC for preview, though supported,
2663 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002665 (previewSize.width == (int32_t)videoWidth)&&
2666 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002667 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002668 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002669 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002670 /* When goog_zoom is linked to the preview or video stream,
2671 * disable UBWC for the linked stream */
2672 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2673 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2674 channel->setUBWCEnabled(false);
2675 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 newStream->max_buffers = channel->getNumBuffers();
2677 newStream->priv = channel;
2678 }
2679 break;
2680 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2681 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2682 mChannelHandle,
2683 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002684 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002685 this,
2686 newStream,
2687 (cam_stream_type_t)
2688 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2689 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2690 mMetadataChannel);
2691 if (channel == NULL) {
2692 LOGE("allocation of YUV channel failed");
2693 pthread_mutex_unlock(&mMutex);
2694 return -ENOMEM;
2695 }
2696 newStream->max_buffers = channel->getNumBuffers();
2697 newStream->priv = channel;
2698 break;
2699 }
2700 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2701 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002702 case HAL_PIXEL_FORMAT_RAW10: {
2703 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2704 (HAL_DATASPACE_DEPTH != newStream->data_space))
2705 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 mRawChannel = new QCamera3RawChannel(
2707 mCameraHandle->camera_handle, mChannelHandle,
2708 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002709 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002710 this, newStream,
2711 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002712 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 if (mRawChannel == NULL) {
2714 LOGE("allocation of raw channel failed");
2715 pthread_mutex_unlock(&mMutex);
2716 return -ENOMEM;
2717 }
2718 newStream->max_buffers = mRawChannel->getNumBuffers();
2719 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2720 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002721 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002722 case HAL_PIXEL_FORMAT_BLOB:
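// A BLOB stream in HAL_DATASPACE_DEPTH is backed by a dedicated depth channel;
// every other BLOB stream is served by the JPEG picture channel.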
Emilian Peev7650c122017-01-19 08:24:33 -08002723 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2724 mDepthChannel = new QCamera3DepthChannel(
2725 mCameraHandle->camera_handle, mChannelHandle,
2726 mCameraHandle->ops, NULL, NULL, &padding_info,
2727 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2728 mMetadataChannel);
2729 if (NULL == mDepthChannel) {
2730 LOGE("Allocation of depth channel failed");
2731 pthread_mutex_unlock(&mMutex);
2732 return NO_MEMORY;
2733 }
2734 newStream->priv = mDepthChannel;
2735 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2736 } else {
2737 // Max live snapshot inflight buffer is 1. This is to mitigate
2738 // frame drop issues for video snapshot. The more buffers being
2739 // allocated, the more frame drops there are.
2740 mPictureChannel = new QCamera3PicChannel(
2741 mCameraHandle->camera_handle, mChannelHandle,
2742 mCameraHandle->ops, captureResultCb,
2743 setBufferErrorStatus, &padding_info, this, newStream,
2744 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2745 m_bIs4KVideo, isZsl, mMetadataChannel,
2746 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2747 if (mPictureChannel == NULL) {
2748 LOGE("allocation of channel failed");
2749 pthread_mutex_unlock(&mMutex);
2750 return -ENOMEM;
2751 }
2752 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2753 newStream->max_buffers = mPictureChannel->getNumBuffers();
2754 mPictureChannel->overrideYuvSize(
2755 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2756 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002758 break;
2759
2760 default:
2761 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002762 pthread_mutex_unlock(&mMutex);
2763 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 }
2765 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2766 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2767 } else {
2768 LOGE("Error, Unknown stream type");
2769 pthread_mutex_unlock(&mMutex);
2770 return -EINVAL;
2771 }
2772
2773 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
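// If the default format chosen for this stream resolves to UBWC, tag the gralloc
// usage so buffers are allocated UBWC; the format selection depends on the stream
// type, dimensions, the preview UBWC decision and the EIS type in use.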
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002774 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002775 // Here we only care whether it's EIS3 or not
2776 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2777 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2778 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2779 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002780 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002781 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002782 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002783 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2784 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2785 }
2786 }
2787
2788 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2789 it != mStreamInfo.end(); it++) {
2790 if ((*it)->stream == newStream) {
2791 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2792 break;
2793 }
2794 }
2795 } else {
2796 // Channel already exists for this stream
2797 // Do nothing for now
2798 }
2799 padding_info = gCamCapability[mCameraId]->padding_info;
2800
Emilian Peev7650c122017-01-19 08:24:33 -08002801 /* Do not add entries for the input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 * since there is no real stream associated with them
2803 */
Emilian Peev7650c122017-01-19 08:24:33 -08002804 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002805 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2806 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002807 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002809 }
2810
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002811 // Let buffer dispatcher know the configured streams.
2812 mOutputBufferDispatcher.configureStreams(streamList);
2813
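// The raw-only configuration is honored only in the vendor RAW_ONLY operation
// mode; in every other mode treat it as non-raw so that the analysis and support
// channels below can still be created.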
Thierry Strudel2896d122017-02-23 19:18:03 -08002814 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2815 onlyRaw = false;
2816 }
2817
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002819 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002820 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002821 cam_analysis_info_t analysisInfo;
2822 int32_t ret = NO_ERROR;
2823 ret = mCommon.getAnalysisInfo(
2824 FALSE,
2825 analysisFeatureMask,
2826 &analysisInfo);
2827 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002828 cam_color_filter_arrangement_t analysis_color_arrangement =
2829 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2830 CAM_FILTER_ARRANGEMENT_Y :
2831 gCamCapability[mCameraId]->color_arrangement);
2832 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2833 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002834 cam_dimension_t analysisDim;
2835 analysisDim = mCommon.getMatchingDimension(previewSize,
2836 analysisInfo.analysis_recommended_res);
2837
2838 mAnalysisChannel = new QCamera3SupportChannel(
2839 mCameraHandle->camera_handle,
2840 mChannelHandle,
2841 mCameraHandle->ops,
2842 &analysisInfo.analysis_padding_info,
2843 analysisFeatureMask,
2844 CAM_STREAM_TYPE_ANALYSIS,
2845 &analysisDim,
2846 (analysisInfo.analysis_format
2847 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2848 : CAM_FORMAT_YUV_420_NV21),
2849 analysisInfo.hw_analysis_supported,
2850 gCamCapability[mCameraId]->color_arrangement,
2851 this,
2852 0); // force buffer count to 0
2853 } else {
2854 LOGW("getAnalysisInfo failed, ret = %d", ret);
2855 }
2856 if (!mAnalysisChannel) {
2857 LOGW("Analysis channel cannot be created");
2858 }
2859 }
2860
Thierry Strudel3d639192016-09-09 11:52:26 -07002861 //RAW DUMP channel
2862 if (mEnableRawDump && isRawStreamRequested == false){
2863 cam_dimension_t rawDumpSize;
2864 rawDumpSize = getMaxRawSize(mCameraId);
2865 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2866 setPAAFSupport(rawDumpFeatureMask,
2867 CAM_STREAM_TYPE_RAW,
2868 gCamCapability[mCameraId]->color_arrangement);
2869 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2870 mChannelHandle,
2871 mCameraHandle->ops,
2872 rawDumpSize,
2873 &padding_info,
2874 this, rawDumpFeatureMask);
2875 if (!mRawDumpChannel) {
2876 LOGE("Raw Dump channel cannot be created");
2877 pthread_mutex_unlock(&mMutex);
2878 return -ENOMEM;
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 if (mAnalysisChannel) {
2883 cam_analysis_info_t analysisInfo;
2884 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2885 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2886 CAM_STREAM_TYPE_ANALYSIS;
2887 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2888 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002889 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2891 &analysisInfo);
2892 if (rc != NO_ERROR) {
2893 LOGE("getAnalysisInfo failed, ret = %d", rc);
2894 pthread_mutex_unlock(&mMutex);
2895 return rc;
2896 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002897 cam_color_filter_arrangement_t analysis_color_arrangement =
2898 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2899 CAM_FILTER_ARRANGEMENT_Y :
2900 gCamCapability[mCameraId]->color_arrangement);
2901 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2902 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2903 analysis_color_arrangement);
2904
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002906 mCommon.getMatchingDimension(previewSize,
2907 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002908 mStreamConfigInfo.num_streams++;
2909 }
2910
Thierry Strudel2896d122017-02-23 19:18:03 -08002911 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002912 cam_analysis_info_t supportInfo;
2913 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2914 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2915 setPAAFSupport(callbackFeatureMask,
2916 CAM_STREAM_TYPE_CALLBACK,
2917 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002918 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002919 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002920 if (ret != NO_ERROR) {
2921 /* Ignore the error for Mono camera
2922 * because the PAAF bit mask is only set
2923 * for CAM_STREAM_TYPE_ANALYSIS stream type
2924 */
2925 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2926 LOGW("getAnalysisInfo failed, ret = %d", ret);
2927 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002928 }
2929 mSupportChannel = new QCamera3SupportChannel(
2930 mCameraHandle->camera_handle,
2931 mChannelHandle,
2932 mCameraHandle->ops,
2933 &gCamCapability[mCameraId]->padding_info,
2934 callbackFeatureMask,
2935 CAM_STREAM_TYPE_CALLBACK,
2936 &QCamera3SupportChannel::kDim,
2937 CAM_FORMAT_YUV_420_NV21,
2938 supportInfo.hw_analysis_supported,
2939 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002940 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002941 if (!mSupportChannel) {
2942 LOGE("dummy channel cannot be created");
2943 pthread_mutex_unlock(&mMutex);
2944 return -ENOMEM;
2945 }
2946 }
2947
2948 if (mSupportChannel) {
2949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2950 QCamera3SupportChannel::kDim;
2951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2952 CAM_STREAM_TYPE_CALLBACK;
2953 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2954 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2955 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2956 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2957 gCamCapability[mCameraId]->color_arrangement);
2958 mStreamConfigInfo.num_streams++;
2959 }
2960
2961 if (mRawDumpChannel) {
2962 cam_dimension_t rawSize;
2963 rawSize = getMaxRawSize(mCameraId);
2964 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2965 rawSize;
2966 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2967 CAM_STREAM_TYPE_RAW;
2968 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2969 CAM_QCOM_FEATURE_NONE;
2970 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2971 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2972 gCamCapability[mCameraId]->color_arrangement);
2973 mStreamConfigInfo.num_streams++;
2974 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002975
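// If an HDR+ RAW source channel exists, add a max-size RAW entry for it to the
// stream configuration as well.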
2976 if (mHdrPlusRawSrcChannel) {
2977 cam_dimension_t rawSize;
2978 rawSize = getMaxRawSize(mCameraId);
2979 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2980 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2981 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2982 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2983 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2984 gCamCapability[mCameraId]->color_arrangement);
2985 mStreamConfigInfo.num_streams++;
2986 }
2987
Thierry Strudel3d639192016-09-09 11:52:26 -07002988 /* In HFR mode, if no video stream is added, create a dummy channel so that
2989 * the ISP can run in batch mode even for the preview-only case. This channel is
2990 * never 'start'ed (no stream-on), it is only 'initialized' */
2991 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2992 !m_bIsVideo) {
2993 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2994 setPAAFSupport(dummyFeatureMask,
2995 CAM_STREAM_TYPE_VIDEO,
2996 gCamCapability[mCameraId]->color_arrangement);
2997 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2998 mChannelHandle,
2999 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003000 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003001 this,
3002 &mDummyBatchStream,
3003 CAM_STREAM_TYPE_VIDEO,
3004 dummyFeatureMask,
3005 mMetadataChannel);
3006 if (NULL == mDummyBatchChannel) {
3007 LOGE("creation of mDummyBatchChannel failed."
3008 "Preview will use non-hfr sensor mode ");
3009 }
3010 }
3011 if (mDummyBatchChannel) {
3012 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3013 mDummyBatchStream.width;
3014 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3015 mDummyBatchStream.height;
3016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3017 CAM_STREAM_TYPE_VIDEO;
3018 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3019 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3020 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3021 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3022 gCamCapability[mCameraId]->color_arrangement);
3023 mStreamConfigInfo.num_streams++;
3024 }
3025
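// Publish the buffer budget for this configuration: 4K video reports 0 extra max
// buffers, EIS 3.0 video uses the video buffer cap, and everything else uses the
// maximum inflight request count.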
3026 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3027 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003028 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003029 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003030
3031 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3032 for (pendingRequestIterator i = mPendingRequestsList.begin();
3033 i != mPendingRequestsList.end();) {
3034 i = erasePendingRequest(i);
3035 }
3036 mPendingFrameDropList.clear();
3037 // Initialize/Reset the pending buffers list
3038 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3039 req.mPendingBufferList.clear();
3040 }
3041 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003042 mExpectedInflightDuration = 0;
3043 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003044
Thierry Strudel3d639192016-09-09 11:52:26 -07003045 mCurJpegMeta.clear();
3046 //Get min frame duration for this streams configuration
3047 deriveMinFrameDuration();
3048
Chien-Yu Chenee335912017-02-09 17:53:20 -08003049 mFirstPreviewIntentSeen = false;
3050
3051 // Disable HRD+ if it's enabled;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003052 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003053 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3054 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003055 disableHdrPlusModeLocked();
3056 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003057
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 // Update state
3059 mState = CONFIGURED;
3060
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003061 mFirstMetadataCallback = true;
3062
Thierry Strudel3d639192016-09-09 11:52:26 -07003063 pthread_mutex_unlock(&mMutex);
3064
3065 return rc;
3066}
3067
3068/*===========================================================================
3069 * FUNCTION : validateCaptureRequest
3070 *
3071 * DESCRIPTION: validate a capture request from camera service
3072 *
3073 * PARAMETERS :
3074 * @request : request from framework to process
3075 *
3076 * RETURN : NO_ERROR if the request is valid; BAD_VALUE otherwise
3077 *
3078 *==========================================================================*/
3079int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003080 camera3_capture_request_t *request,
3081 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003082{
3083 ssize_t idx = 0;
3084 const camera3_stream_buffer_t *b;
3085 CameraMetadata meta;
3086
3087 /* Sanity check the request */
3088 if (request == NULL) {
3089 LOGE("NULL capture request");
3090 return BAD_VALUE;
3091 }
3092
3093 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3094 /*settings cannot be null for the first request*/
3095 return BAD_VALUE;
3096 }
3097
3098 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003099 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3100 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003101 LOGE("Request %d: No output buffers provided!",
3102 __FUNCTION__, frameNumber);
3103 return BAD_VALUE;
3104 }
3105 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3106 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3107 request->num_output_buffers, MAX_NUM_STREAMS);
3108 return BAD_VALUE;
3109 }
3110 if (request->input_buffer != NULL) {
3111 b = request->input_buffer;
3112 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3113 LOGE("Request %d: Buffer %ld: Status not OK!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 if (b->release_fence != -1) {
3118 LOGE("Request %d: Buffer %ld: Has a release fence!",
3119 frameNumber, (long)idx);
3120 return BAD_VALUE;
3121 }
3122 if (b->buffer == NULL) {
3123 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3124 frameNumber, (long)idx);
3125 return BAD_VALUE;
3126 }
3127 }
3128
3129 // Validate all buffers
3130 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003131 if (b == NULL) {
3132 return BAD_VALUE;
3133 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003134 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003135 QCamera3ProcessingChannel *channel =
3136 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3137 if (channel == NULL) {
3138 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3143 LOGE("Request %d: Buffer %ld: Status not OK!",
3144 frameNumber, (long)idx);
3145 return BAD_VALUE;
3146 }
3147 if (b->release_fence != -1) {
3148 LOGE("Request %d: Buffer %ld: Has a release fence!",
3149 frameNumber, (long)idx);
3150 return BAD_VALUE;
3151 }
3152 if (b->buffer == NULL) {
3153 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3154 frameNumber, (long)idx);
3155 return BAD_VALUE;
3156 }
3157 if (*(b->buffer) == NULL) {
3158 LOGE("Request %d: Buffer %ld: NULL private handle!",
3159 frameNumber, (long)idx);
3160 return BAD_VALUE;
3161 }
3162 idx++;
3163 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003164 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003165 return NO_ERROR;
3166}
3167
3168/*===========================================================================
3169 * FUNCTION : deriveMinFrameDuration
3170 *
3171 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3172 * on currently configured streams.
3173 *
3174 * PARAMETERS : NONE
3175 *
3176 * RETURN : NONE
3177 *
3178 *==========================================================================*/
3179void QCamera3HardwareInterface::deriveMinFrameDuration()
3180{
3181 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003182 bool hasRaw = false;
3183
3184 mMinRawFrameDuration = 0;
3185 mMinJpegFrameDuration = 0;
3186 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003187
3188 maxJpegDim = 0;
3189 maxProcessedDim = 0;
3190 maxRawDim = 0;
3191
3192 // Figure out maximum jpeg, processed, and raw dimensions
3193 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3194 it != mStreamInfo.end(); it++) {
3195
3196 // Input stream doesn't have valid stream_type
3197 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3198 continue;
3199
3200 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3201 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3202 if (dimension > maxJpegDim)
3203 maxJpegDim = dimension;
3204 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3205 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3206 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003207 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003208 if (dimension > maxRawDim)
3209 maxRawDim = dimension;
3210 } else {
3211 if (dimension > maxProcessedDim)
3212 maxProcessedDim = dimension;
3213 }
3214 }
3215
3216 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3217 MAX_SIZES_CNT);
3218
3219 //Assume all jpeg dimensions are in processed dimensions.
3220 if (maxJpegDim > maxProcessedDim)
3221 maxProcessedDim = maxJpegDim;
3222 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003223 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003224 maxRawDim = INT32_MAX;
3225
3226 for (size_t i = 0; i < count; i++) {
3227 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3228 gCamCapability[mCameraId]->raw_dim[i].height;
3229 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3230 maxRawDim = dimension;
3231 }
3232 }
3233
3234 //Find minimum durations for processed, jpeg, and raw
3235 for (size_t i = 0; i < count; i++) {
3236 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3237 gCamCapability[mCameraId]->raw_dim[i].height) {
3238 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3239 break;
3240 }
3241 }
3242 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3243 for (size_t i = 0; i < count; i++) {
3244 if (maxProcessedDim ==
3245 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3246 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3247 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3248 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3249 break;
3250 }
3251 }
3252}
3253
3254/*===========================================================================
3255 * FUNCTION : getMinFrameDuration
3256 *
3257 * DESCRIPTION: get minimum frame duration based on the derived minimum frame durations
3258 * and current request configuration.
3259 *
3260 * PARAMETERS : @request: request sent by the framework
3261 *
3262 * RETURN : min frame duration for a particular request
3263 *
3264 *==========================================================================*/
3265int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3266{
3267 bool hasJpegStream = false;
3268 bool hasRawStream = false;
3269 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3270 const camera3_stream_t *stream = request->output_buffers[i].stream;
3271 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3272 hasJpegStream = true;
3273 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3274 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3275 stream->format == HAL_PIXEL_FORMAT_RAW16)
3276 hasRawStream = true;
3277 }
3278
3279 if (!hasJpegStream)
3280 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3281 else
3282 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3283}
3284
3285/*===========================================================================
3286 * FUNCTION : handleBuffersDuringFlushLock
3287 *
3288 * DESCRIPTION: Account for buffers returned from back-end during flush
3289 * This function is executed while mMutex is held by the caller.
3290 *
3291 * PARAMETERS :
3292 * @buffer: image buffer for the callback
3293 *
3294 * RETURN :
3295 *==========================================================================*/
3296void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3297{
3298 bool buffer_found = false;
3299 for (List<PendingBuffersInRequest>::iterator req =
3300 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3301 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3302 for (List<PendingBufferInfo>::iterator i =
3303 req->mPendingBufferList.begin();
3304 i != req->mPendingBufferList.end(); i++) {
3305 if (i->buffer == buffer->buffer) {
3306 mPendingBuffersMap.numPendingBufsAtFlush--;
3307 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3308 buffer->buffer, req->frame_number,
3309 mPendingBuffersMap.numPendingBufsAtFlush);
3310 buffer_found = true;
3311 break;
3312 }
3313 }
3314 if (buffer_found) {
3315 break;
3316 }
3317 }
3318 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3319 //signal the flush()
3320 LOGD("All buffers returned to HAL. Continue flush");
3321 pthread_cond_signal(&mBuffersCond);
3322 }
3323}
3324
Thierry Strudel3d639192016-09-09 11:52:26 -07003325/*===========================================================================
3326 * FUNCTION : handleBatchMetadata
3327 *
3328 * DESCRIPTION: Handles metadata buffer callback in batch mode
3329 *
3330 * PARAMETERS : @metadata_buf: metadata buffer
3331 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3332 * the meta buf in this method
3333 *
3334 * RETURN :
3335 *
3336 *==========================================================================*/
3337void QCamera3HardwareInterface::handleBatchMetadata(
3338 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3339{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003340 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003341
3342 if (NULL == metadata_buf) {
3343 LOGE("metadata_buf is NULL");
3344 return;
3345 }
3346 /* In batch mode, the metadata will contain the frame number and timestamp of
3347 * the last frame in the batch. Eg: a batch containing buffers from request
3348 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3349 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3350 * multiple process_capture_results */
3351 metadata_buffer_t *metadata =
3352 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3353 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3354 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3355 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3356 uint32_t frame_number = 0, urgent_frame_number = 0;
3357 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3358 bool invalid_metadata = false;
3359 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3360 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003361 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003362
3363 int32_t *p_frame_number_valid =
3364 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3365 uint32_t *p_frame_number =
3366 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3367 int64_t *p_capture_time =
3368 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3369 int32_t *p_urgent_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_urgent_frame_number =
3372 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3373
3374 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3375 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3376 (NULL == p_urgent_frame_number)) {
3377 LOGE("Invalid metadata");
3378 invalid_metadata = true;
3379 } else {
3380 frame_number_valid = *p_frame_number_valid;
3381 last_frame_number = *p_frame_number;
3382 last_frame_capture_time = *p_capture_time;
3383 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3384 last_urgent_frame_number = *p_urgent_frame_number;
3385 }
3386
3387 /* In batchmode, when no video buffers are requested, set_parms are sent
3388 * for every capture_request. The difference between consecutive urgent
3389 * frame numbers and frame numbers should be used to interpolate the
3390 * corresponding frame numbers and time stamps */
3391 pthread_mutex_lock(&mMutex);
3392 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003393 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3394 if(idx < 0) {
3395 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3396 last_urgent_frame_number);
3397 mState = ERROR;
3398 pthread_mutex_unlock(&mMutex);
3399 return;
3400 }
3401 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3403 first_urgent_frame_number;
3404
3405 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3406 urgent_frame_number_valid,
3407 first_urgent_frame_number, last_urgent_frame_number);
3408 }
3409
3410 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003411 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3412 if(idx < 0) {
3413 LOGE("Invalid frame number received: %d. Irrecoverable error",
3414 last_frame_number);
3415 mState = ERROR;
3416 pthread_mutex_unlock(&mMutex);
3417 return;
3418 }
3419 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003420 frameNumDiff = last_frame_number + 1 -
3421 first_frame_number;
3422 mPendingBatchMap.removeItem(last_frame_number);
3423
3424 LOGD("frm: valid: %d frm_num: %d - %d",
3425 frame_number_valid,
3426 first_frame_number, last_frame_number);
3427
3428 }
3429 pthread_mutex_unlock(&mMutex);
3430
3431 if (urgent_frame_number_valid || frame_number_valid) {
3432 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3433 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3434 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3435 urgentFrameNumDiff, last_urgent_frame_number);
3436 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3437 LOGE("frameNumDiff: %d frameNum: %d",
3438 frameNumDiff, last_frame_number);
3439 }
3440
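/* Example: a batch covering requests 5..8 reports frame number 8 once; loopCount
 * is then 4 and each iteration below re-injects frame number 5 + i and an
 * interpolated timestamp before calling handleMetadataWithLock. */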
3441 for (size_t i = 0; i < loopCount; i++) {
3442 /* handleMetadataWithLock is called even for invalid_metadata for
3443 * pipeline depth calculation */
3444 if (!invalid_metadata) {
3445 /* Infer frame number. Batch metadata contains frame number of the
3446 * last frame */
3447 if (urgent_frame_number_valid) {
3448 if (i < urgentFrameNumDiff) {
3449 urgent_frame_number =
3450 first_urgent_frame_number + i;
3451 LOGD("inferred urgent frame_number: %d",
3452 urgent_frame_number);
3453 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3454 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3455 } else {
3456 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3457 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3458 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3459 }
3460 }
3461
3462 /* Infer frame number. Batch metadata contains frame number of the
3463 * last frame */
3464 if (frame_number_valid) {
3465 if (i < frameNumDiff) {
3466 frame_number = first_frame_number + i;
3467 LOGD("inferred frame_number: %d", frame_number);
3468 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3469 CAM_INTF_META_FRAME_NUMBER, frame_number);
3470 } else {
3471 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3472 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3473 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3474 }
3475 }
3476
3477 if (last_frame_capture_time) {
3478 //Infer timestamp
3479 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003480 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003482 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003483 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3484 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3485 LOGD("batch capture_time: %lld, capture_time: %lld",
3486 last_frame_capture_time, capture_time);
3487 }
3488 }
3489 pthread_mutex_lock(&mMutex);
3490 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003491 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003492 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3493 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 pthread_mutex_unlock(&mMutex);
3496 }
3497
3498 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003499 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 mMetadataChannel->bufDone(metadata_buf);
3501 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003502 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003503 }
3504}
3505
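/*===========================================================================
 * FUNCTION : notifyError
 *
 * DESCRIPTION: Send a CAMERA3_MSG_ERROR notification to the framework for the
 * given frame number with the supplied error code. No stream is
 * attached to the error message.
 *
 * PARAMETERS : @frameNumber: frame number the error applies to
 * @errorCode: camera3 error message code to report
 *
 * RETURN :
 *
 *==========================================================================*/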
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003506void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3507 camera3_error_msg_code_t errorCode)
3508{
3509 camera3_notify_msg_t notify_msg;
3510 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3511 notify_msg.type = CAMERA3_MSG_ERROR;
3512 notify_msg.message.error.error_code = errorCode;
3513 notify_msg.message.error.error_stream = NULL;
3514 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003515 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003516
3517 return;
3518}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003519
3520/*===========================================================================
3521 * FUNCTION : sendPartialMetadataWithLock
3522 *
3523 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3524 *
3525 * PARAMETERS : @metadata: metadata buffer
3526 * @requestIter: The iterator for the pending capture request for
3527 * which the partial result is being sent
3528 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3529 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003530 * @isJumpstartMetadata: Whether this is a partial metadata for
3531 * jumpstart, i.e. even though it doesn't map to a valid partial
3532 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533 *
3534 * RETURN :
3535 *
3536 *==========================================================================*/
3537
3538void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3539 metadata_buffer_t *metadata,
3540 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003541 bool lastUrgentMetadataInBatch,
3542 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003543{
3544 camera3_capture_result_t result;
3545 memset(&result, 0, sizeof(camera3_capture_result_t));
3546
3547 requestIter->partial_result_cnt++;
3548
3549 // Extract 3A metadata
3550 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003551 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3552 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003553 // Populate metadata result
3554 result.frame_number = requestIter->frame_number;
3555 result.num_output_buffers = 0;
3556 result.output_buffers = NULL;
3557 result.partial_result = requestIter->partial_result_cnt;
3558
3559 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003560 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003561 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3562 // Notify HDR+ client about the partial metadata.
3563 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3564 result.partial_result == PARTIAL_RESULT_COUNT);
3565 }
3566 }
3567
3568 orchestrateResult(&result);
3569 LOGD("urgent frame_number = %u", result.frame_number);
3570 free_camera_metadata((camera_metadata_t *)result.result);
3571}
3572
Thierry Strudel3d639192016-09-09 11:52:26 -07003573/*===========================================================================
3574 * FUNCTION : handleMetadataWithLock
3575 *
3576 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3577 *
3578 * PARAMETERS : @metadata_buf: metadata buffer
3579 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3580 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003581 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3582 * last urgent metadata in a batch. Always true for non-batch mode
3583 * @lastMetadataInBatch: Boolean to indicate whether this is the
3584 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003585 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3586 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 *
3588 * RETURN :
3589 *
3590 *==========================================================================*/
3591void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003592 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003593 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3594 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003595{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003596 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003597 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3598 //during flush do not send metadata from this thread
3599 LOGD("not sending metadata during flush or when mState is error");
3600 if (free_and_bufdone_meta_buf) {
3601 mMetadataChannel->bufDone(metadata_buf);
3602 free(metadata_buf);
3603 }
3604 return;
3605 }
3606
3607 //not in flush
3608 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3609 int32_t frame_number_valid, urgent_frame_number_valid;
3610 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003611 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003612 nsecs_t currentSysTime;
3613
3614 int32_t *p_frame_number_valid =
3615 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3616 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3617 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003618 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003619 int32_t *p_urgent_frame_number_valid =
3620 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3621 uint32_t *p_urgent_frame_number =
3622 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3623 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3624 metadata) {
3625 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3626 *p_frame_number_valid, *p_frame_number);
3627 }
3628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003629 camera_metadata_t *resultMetadata = nullptr;
3630
Thierry Strudel3d639192016-09-09 11:52:26 -07003631 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3632 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3633 LOGE("Invalid metadata");
3634 if (free_and_bufdone_meta_buf) {
3635 mMetadataChannel->bufDone(metadata_buf);
3636 free(metadata_buf);
3637 }
3638 goto done_metadata;
3639 }
3640 frame_number_valid = *p_frame_number_valid;
3641 frame_number = *p_frame_number;
3642 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003643 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003644 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3645 urgent_frame_number = *p_urgent_frame_number;
3646 currentSysTime = systemTime(CLOCK_MONOTONIC);
3647
Jason Lee603176d2017-05-31 11:43:27 -07003648 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3649 const int tries = 3;
3650 nsecs_t bestGap, measured;
3651 for (int i = 0; i < tries; ++i) {
3652 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3653 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3654 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3655 const nsecs_t gap = tmono2 - tmono;
3656 if (i == 0 || gap < bestGap) {
3657 bestGap = gap;
3658 measured = tbase - ((tmono + tmono2) >> 1);
3659 }
3660 }
3661 capture_time -= measured;
3662 }
3663
Thierry Strudel3d639192016-09-09 11:52:26 -07003664 // Detect if buffers from any requests are overdue
3665 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 int64_t timeout;
3667 {
3668 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3669 // If there is a pending HDR+ request, the following requests may be blocked until the
3670 // HDR+ request is done. So allow a longer timeout.
3671 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3672 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003673 if (timeout < mExpectedInflightDuration) {
3674 timeout = mExpectedInflightDuration;
3675 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676 }
3677
3678 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003679 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003680 assert(missed.stream->priv);
3681 if (missed.stream->priv) {
3682 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3683 assert(ch->mStreams[0]);
3684 if (ch->mStreams[0]) {
3685 LOGE("Cancel missing frame = %d, buffer = %p,"
3686 "stream type = %d, stream format = %d",
3687 req.frame_number, missed.buffer,
3688 ch->mStreams[0]->getMyType(), missed.stream->format);
3689 ch->timeoutFrame(req.frame_number);
3690 }
3691 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 }
3693 }
3694 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003695 //For the very first metadata callback, regardless of whether it contains a valid
3696 //frame number, send the partial metadata for the jumpstarting requests.
3697 //Note that this has to be done even if the metadata doesn't contain valid
3698 //urgent frame number, because in the case where only 1 request is ever submitted
3699 //to the HAL, there won't be a subsequent valid urgent frame number.
3700 if (mFirstMetadataCallback) {
3701 for (pendingRequestIterator i =
3702 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3703 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003704 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3705 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003706 }
3707 }
3708 mFirstMetadataCallback = false;
3709 }
3710
Thierry Strudel3d639192016-09-09 11:52:26 -07003711 //Partial result on process_capture_result for timestamp
3712 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003713 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003714
3715 //Received an urgent frame number, handle it
3716 //using partial results
3717 for (pendingRequestIterator i =
3718 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3719 LOGD("Iterator Frame = %d urgent frame = %d",
3720 i->frame_number, urgent_frame_number);
3721
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003722 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003723 (i->partial_result_cnt == 0)) {
3724 LOGE("Error: HAL missed urgent metadata for frame number %d",
3725 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003726 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003727 }
3728
3729 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003730 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003731 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3732 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003733 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3734 // Instant AEC settled for this frame.
3735 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3736 mInstantAECSettledFrameNumber = urgent_frame_number;
3737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 break;
3739 }
3740 }
3741 }
3742
3743 if (!frame_number_valid) {
3744 LOGD("Not a valid normal frame number, used as SOF only");
3745 if (free_and_bufdone_meta_buf) {
3746 mMetadataChannel->bufDone(metadata_buf);
3747 free(metadata_buf);
3748 }
3749 goto done_metadata;
3750 }
3751 LOGH("valid frame_number = %u, capture_time = %lld",
3752 frame_number, capture_time);
3753
Emilian Peev4e0fe952017-06-30 12:40:09 -07003754 handleDepthDataLocked(metadata->depth_data, frame_number,
3755 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003756
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 // Check whether any stream buffer corresponding to this frame is dropped or not.
3758 // If dropped, then send CAMERA3_MSG_ERROR_BUFFER for the corresponding stream.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003759 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3760 for (auto & pendingRequest : mPendingRequestsList) {
3761 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3762 mInstantAECSettledFrameNumber)) {
3763 camera3_notify_msg_t notify_msg = {};
3764 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003765 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 QCamera3ProcessingChannel *channel =
3767 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003768 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 if (p_cam_frame_drop) {
3770 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003771 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003772 // Got the stream ID for drop frame.
3773 dropFrame = true;
3774 break;
3775 }
3776 }
3777 } else {
3778 // This is the instant AEC case.
3779 // For instant AEC, drop the stream until AEC is settled.
3780 dropFrame = true;
3781 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (dropFrame) {
3784 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3785 if (p_cam_frame_drop) {
3786 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003787 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003789 } else {
3790 // For instant AEC, inform frame drop and frame number
3791 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3792 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003793 pendingRequest.frame_number, streamID,
3794 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003795 }
3796 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003800 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003801 if (p_cam_frame_drop) {
3802 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003803 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003805 } else {
3806 // For instant AEC, inform frame drop and frame number
3807 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3808 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 pendingRequest.frame_number, streamID,
3810 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003811 }
3812 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003814 PendingFrameDrop.stream_ID = streamID;
3815 // Add the Frame drop info to mPendingFrameDropList
3816 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 }
3819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003821
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003822 for (auto & pendingRequest : mPendingRequestsList) {
3823 // Find the pending request with the frame number.
3824 if (pendingRequest.frame_number == frame_number) {
3825 // Update the sensor timestamp.
3826 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003827
Thierry Strudel3d639192016-09-09 11:52:26 -07003828
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003829             /* Set the timestamp in display metadata so that clients aware of
3830                private_handle, such as VT, can use these unmodified timestamps.
3831                The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003832 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003833
Thierry Strudel3d639192016-09-09 11:52:26 -07003834 // Find channel requiring metadata, meaning internal offline postprocess
3835 // is needed.
3836             // TODO: for now, we don't support two streams requiring metadata at the same time
3837             // (because we are not making copies, and the metadata buffer is not reference counted).
3838 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3840 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003841 if (iter->need_metadata) {
3842 internalPproc = true;
3843 QCamera3ProcessingChannel *channel =
3844 (QCamera3ProcessingChannel *)iter->stream->priv;
3845 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003846 if(p_is_metabuf_queued != NULL) {
3847 *p_is_metabuf_queued = true;
3848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003849 break;
3850 }
3851 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003852 for (auto itr = pendingRequest.internalRequestList.begin();
3853 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003854 if (itr->need_metadata) {
3855 internalPproc = true;
3856 QCamera3ProcessingChannel *channel =
3857 (QCamera3ProcessingChannel *)itr->stream->priv;
3858 channel->queueReprocMetadata(metadata_buf);
3859 break;
3860 }
3861 }
3862
Thierry Strudel54dc9782017-02-15 12:12:10 -08003863 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003864
3865 bool *enableZsl = nullptr;
3866 if (gExposeEnableZslKey) {
3867 enableZsl = &pendingRequest.enableZsl;
3868 }
3869
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003870 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003871 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003872 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003873
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003874 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003876 if (pendingRequest.blob_request) {
3877 //Dump tuning metadata if enabled and available
3878 char prop[PROPERTY_VALUE_MAX];
3879 memset(prop, 0, sizeof(prop));
3880 property_get("persist.camera.dumpmetadata", prop, "0");
3881 int32_t enabled = atoi(prop);
3882 if (enabled && metadata->is_tuning_params_valid) {
3883 dumpMetadataToFile(metadata->tuning_params,
3884 mMetaFrameCount,
3885 enabled,
3886 "Snapshot",
3887 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003888 }
3889 }
3890
3891 if (!internalPproc) {
3892 LOGD("couldn't find need_metadata for this metadata");
3893 // Return metadata buffer
3894 if (free_and_bufdone_meta_buf) {
3895 mMetadataChannel->bufDone(metadata_buf);
3896 free(metadata_buf);
3897 }
3898 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003899
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003900 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003901 }
3902 }
3903
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003904 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3905
3906 // Try to send out capture result metadata.
3907 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003908 return;
3909
Thierry Strudel3d639192016-09-09 11:52:26 -07003910done_metadata:
3911 for (pendingRequestIterator i = mPendingRequestsList.begin();
3912 i != mPendingRequestsList.end() ;i++) {
3913 i->pipeline_depth++;
3914 }
3915 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3916 unblockRequestIfNecessary();
3917}
3918
3919/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003920 * FUNCTION : handleDepthDataWithLock
3921 *
3922 * DESCRIPTION: Handles incoming depth data
3923 *
3924 * PARAMETERS : @depthData : Depth data
3925 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003926 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003927 *
3928 * RETURN :
3929 *
3930 *==========================================================================*/
3931void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003932 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003933 uint32_t currentFrameNumber;
3934 buffer_handle_t *depthBuffer;
3935
3936 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003937 return;
3938 }
3939
3940 camera3_stream_buffer_t resultBuffer =
3941 {.acquire_fence = -1,
3942 .release_fence = -1,
3943 .status = CAMERA3_BUFFER_STATUS_OK,
3944 .buffer = nullptr,
3945 .stream = mDepthChannel->getStream()};
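    // Drain queued depth buffers in frame-number order: buffers older than the
    // incoming frame number are returned as CAMERA3_MSG_ERROR_BUFFER, the matching
    // buffer is populated with the depth data (or flagged as an error if the data
    // is not valid), and any newer buffers are left queued for later results.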
Emilian Peev7650c122017-01-19 08:24:33 -08003946 do {
3947 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3948 if (nullptr == depthBuffer) {
3949 break;
3950 }
3951
Emilian Peev7650c122017-01-19 08:24:33 -08003952 resultBuffer.buffer = depthBuffer;
3953 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003954 if (valid) {
3955 int32_t rc = mDepthChannel->populateDepthData(depthData,
3956 frameNumber);
3957 if (NO_ERROR != rc) {
3958 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3959 } else {
3960 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3961 }
Emilian Peev7650c122017-01-19 08:24:33 -08003962 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003963 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003964 }
3965 } else if (currentFrameNumber > frameNumber) {
3966 break;
3967 } else {
3968 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3969 {{currentFrameNumber, mDepthChannel->getStream(),
3970 CAMERA3_MSG_ERROR_BUFFER}}};
3971 orchestrateNotify(&notify_msg);
3972
3973 LOGE("Depth buffer for frame number: %d is missing "
3974 "returning back!", currentFrameNumber);
3975 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3976 }
3977 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003978 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003979 } while (currentFrameNumber < frameNumber);
3980}
3981
3982/*===========================================================================
3983 * FUNCTION : notifyErrorFoPendingDepthData
3984 *
3985 * DESCRIPTION: Returns error for any pending depth buffers
3986 *
3987 * PARAMETERS : depthCh - depth channel that needs to get flushed
3988 *
3989 * RETURN :
3990 *
3991 *==========================================================================*/
3992void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3993 QCamera3DepthChannel *depthCh) {
3994 uint32_t currentFrameNumber;
3995 buffer_handle_t *depthBuffer;
3996
3997 if (nullptr == depthCh) {
3998 return;
3999 }
4000
4001 camera3_notify_msg_t notify_msg =
4002 {.type = CAMERA3_MSG_ERROR,
4003 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4004 camera3_stream_buffer_t resultBuffer =
4005 {.acquire_fence = -1,
4006 .release_fence = -1,
4007 .buffer = nullptr,
4008 .stream = depthCh->getStream(),
4009 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004010
4011 while (nullptr !=
4012 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4013 depthCh->unmapBuffer(currentFrameNumber);
4014
4015 notify_msg.message.error.frame_number = currentFrameNumber;
4016 orchestrateNotify(&notify_msg);
4017
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004018 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004019 };
4020}
4021
4022/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004023 * FUNCTION : hdrPlusPerfLock
4024 *
4025 * DESCRIPTION: perf lock for HDR+ using custom intent
4026 *
4027 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4028 *
4029 * RETURN : None
4030 *
4031 *==========================================================================*/
4032void QCamera3HardwareInterface::hdrPlusPerfLock(
4033 mm_camera_super_buf_t *metadata_buf)
4034{
4035 if (NULL == metadata_buf) {
4036 LOGE("metadata_buf is NULL");
4037 return;
4038 }
4039 metadata_buffer_t *metadata =
4040 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4041 int32_t *p_frame_number_valid =
4042 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4043 uint32_t *p_frame_number =
4044 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4045
4046 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4047 LOGE("%s: Invalid metadata", __func__);
4048 return;
4049 }
4050
Wei Wang01385482017-08-03 10:49:34 -07004051 //acquire perf lock for 2 secs after the last HDR frame is captured
4052 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004053 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4054 if ((p_frame_number != NULL) &&
4055 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004056 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004057 }
4058 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004059}
4060
4061/*===========================================================================
4062 * FUNCTION : handleInputBufferWithLock
4063 *
4064 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4065 *
4066 * PARAMETERS : @frame_number: frame number of the input buffer
4067 *
4068 * RETURN :
4069 *
4070 *==========================================================================*/
4071void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4072{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004073 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 pendingRequestIterator i = mPendingRequestsList.begin();
4075 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4076 i++;
4077 }
4078 if (i != mPendingRequestsList.end() && i->input_buffer) {
4079 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004080 CameraMetadata settings;
4081 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4082 if(i->settings) {
4083 settings = i->settings;
4084 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4085 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004086 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004087 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004089 } else {
4090 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004091 }
4092
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004093 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4094 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4095 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004096
4097 camera3_capture_result result;
4098 memset(&result, 0, sizeof(camera3_capture_result));
4099 result.frame_number = frame_number;
4100 result.result = i->settings;
4101 result.input_buffer = i->input_buffer;
4102 result.partial_result = PARTIAL_RESULT_COUNT;
4103
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004104 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 LOGD("Input request metadata and input buffer frame_number = %u",
4106 i->frame_number);
4107 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004108
4109 // Dispatch result metadata that may be just unblocked by this reprocess result.
4110 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 } else {
4112 LOGE("Could not find input request for frame number %d", frame_number);
4113 }
4114}
4115
4116/*===========================================================================
4117 * FUNCTION : handleBufferWithLock
4118 *
4119 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4120 *
4121 * PARAMETERS : @buffer: image buffer for the callback
4122 * @frame_number: frame number of the image buffer
4123 *
4124 * RETURN :
4125 *
4126 *==========================================================================*/
4127void QCamera3HardwareInterface::handleBufferWithLock(
4128 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4129{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004130 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004131
4132 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4133 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4134 }
4135
Thierry Strudel3d639192016-09-09 11:52:26 -07004136 /* Nothing to be done during error state */
4137 if ((ERROR == mState) || (DEINIT == mState)) {
4138 return;
4139 }
4140 if (mFlushPerf) {
4141 handleBuffersDuringFlushLock(buffer);
4142 return;
4143 }
4144 //not in flush
4145 // If the frame number doesn't exist in the pending request list,
4146 // directly send the buffer to the frameworks, and update pending buffers map
4147 // Otherwise, book-keep the buffer.
4148 pendingRequestIterator i = mPendingRequestsList.begin();
4149 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4150 i++;
4151 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004152
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004153 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004154 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004155 // For a reprocessing request, try to send out result metadata.
4156 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004158 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004160 // Check if this frame was dropped.
4161 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4162 m != mPendingFrameDropList.end(); m++) {
4163 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4164 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4165 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4166 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4167 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4168 frame_number, streamID);
4169 m = mPendingFrameDropList.erase(m);
4170 break;
4171 }
4172 }
4173
4174 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4175 LOGH("result frame_number = %d, buffer = %p",
4176 frame_number, buffer->buffer);
4177
4178 mPendingBuffersMap.removeBuf(buffer->buffer);
4179 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4180
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004181 if (mPreviewStarted == false) {
4182 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4183 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004184 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4185
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004186 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4187 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4188 mPreviewStarted = true;
4189
4190 // Set power hint for preview
4191 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4192 }
4193 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004194}
4195
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004196void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004197 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004198{
4199 // Find the pending request for this result metadata.
4200 auto requestIter = mPendingRequestsList.begin();
4201 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4202 requestIter++;
4203 }
4204
4205 if (requestIter == mPendingRequestsList.end()) {
4206 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4207 return;
4208 }
4209
4210 // Update the result metadata
4211 requestIter->resultMetadata = resultMetadata;
4212
4213 // Check what type of request this is.
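    // Three cases follow: HDR+ and reprocessing requests complete with the full
    // partial-result count immediately, while live requests decrement the in-flight
    // counter and, when HDR+ mode is enabled, forward their metadata to the HDR+ client.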
4214 bool liveRequest = false;
4215 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004216 // HDR+ request doesn't have partial results.
4217 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 } else if (requestIter->input_buffer != nullptr) {
4219 // Reprocessing request result is the same as settings.
4220 requestIter->resultMetadata = requestIter->settings;
4221 // Reprocessing request doesn't have partial results.
4222 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4223 } else {
4224 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004225 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004226 mPendingLiveRequest--;
4227
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004228 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004229 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004230 // For a live request, send the metadata to HDR+ client.
4231 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4232 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4233 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4234 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004235 }
4236 }
4237
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004238     // Remove lens shading map if it's not requested.
4239 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4240 CameraMetadata metadata;
4241 metadata.acquire(resultMetadata);
4242 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4243 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4244 &requestIter->requestedLensShadingMapMode, 1);
4245
4246 requestIter->resultMetadata = metadata.release();
4247 }
4248
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004249 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4250}
4251
4252void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4253 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004254 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4255 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004256 bool readyToSend = true;
4257
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004258 // Iterate through the pending requests to send out result metadata that are ready. Also if
4259 // this result metadata belongs to a live request, notify errors for previous live requests
4260 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004261 auto iter = mPendingRequestsList.begin();
4262 while (iter != mPendingRequestsList.end()) {
4263 // Check if current pending request is ready. If it's not ready, the following pending
4264 // requests are also not ready.
4265 if (readyToSend && iter->resultMetadata == nullptr) {
4266 readyToSend = false;
4267 }
4268
4269 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4270
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004271 camera3_capture_result_t result = {};
4272 result.frame_number = iter->frame_number;
4273 result.result = iter->resultMetadata;
4274 result.partial_result = iter->partial_result_cnt;
4275
4276 // If this pending buffer has result metadata, we may be able to send out shutter callback
4277 // and result metadata.
4278 if (iter->resultMetadata != nullptr) {
4279 if (!readyToSend) {
4280                 // If any of the previous pending requests is not ready, this pending request is
4281 // also not ready to send in order to keep shutter callbacks and result metadata
4282 // in order.
4283 iter++;
4284 continue;
4285 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004286 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004287 // If the result metadata belongs to a live request, notify errors for previous pending
4288 // live requests.
4289 mPendingLiveRequest--;
4290
4291 CameraMetadata dummyMetadata;
4292 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4293 result.result = dummyMetadata.release();
4294
4295 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004296
4297             // partial_result should be PARTIAL_RESULT_COUNT in case of
4298             // ERROR_RESULT.
4299 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4300 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004301 } else {
4302 iter++;
4303 continue;
4304 }
4305
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004306 result.output_buffers = nullptr;
4307 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004308 orchestrateResult(&result);
4309
4310 // For reprocessing, result metadata is the same as settings so do not free it here to
4311 // avoid double free.
4312 if (result.result != iter->settings) {
4313 free_camera_metadata((camera_metadata_t *)result.result);
4314 }
4315 iter->resultMetadata = nullptr;
4316 iter = erasePendingRequest(iter);
4317 }
4318
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 for (auto &iter : mPendingRequestsList) {
4321 // Increment pipeline depth for the following pending requests.
4322 if (iter.frame_number > frameNumber) {
4323 iter.pipeline_depth++;
4324 }
4325 }
4326 }
4327
4328 unblockRequestIfNecessary();
4329}
4330
Thierry Strudel3d639192016-09-09 11:52:26 -07004331/*===========================================================================
4332 * FUNCTION : unblockRequestIfNecessary
4333 *
4334 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4335 * that mMutex is held when this function is called.
4336 *
4337 * PARAMETERS :
4338 *
4339 * RETURN :
4340 *
4341 *==========================================================================*/
4342void QCamera3HardwareInterface::unblockRequestIfNecessary()
4343{
4344 // Unblock process_capture_request
4345 pthread_cond_signal(&mRequestCond);
4346}
4347
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004348/*===========================================================================
4349 * FUNCTION : isHdrSnapshotRequest
4350 *
4351 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4352 *
4353 * PARAMETERS : camera3 request structure
4354 *
4355 * RETURN : boolean decision variable
4356 *
4357 *==========================================================================*/
4358bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4359{
4360 if (request == NULL) {
4361 LOGE("Invalid request handle");
4362 assert(0);
4363 return false;
4364 }
4365
4366 if (!mForceHdrSnapshot) {
4367 CameraMetadata frame_settings;
4368 frame_settings = request->settings;
4369
4370 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4371 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4372 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4373 return false;
4374 }
4375 } else {
4376 return false;
4377 }
4378
4379 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4380 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4381 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4382 return false;
4383 }
4384 } else {
4385 return false;
4386 }
4387 }
4388
4389 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4390 if (request->output_buffers[i].stream->format
4391 == HAL_PIXEL_FORMAT_BLOB) {
4392 return true;
4393 }
4394 }
4395
4396 return false;
4397}
4398/*===========================================================================
4399 * FUNCTION : orchestrateRequest
4400 *
4401 * DESCRIPTION: Orchestrates a capture request from camera service
4402 *
4403 * PARAMETERS :
4404 * @request : request from framework to process
4405 *
4406 * RETURN : Error status codes
4407 *
4408 *==========================================================================*/
4409int32_t QCamera3HardwareInterface::orchestrateRequest(
4410 camera3_capture_request_t *request)
4411{
4412
4413 uint32_t originalFrameNumber = request->frame_number;
4414 uint32_t originalOutputCount = request->num_output_buffers;
4415 const camera_metadata_t *original_settings = request->settings;
4416 List<InternalRequest> internallyRequestedStreams;
4417 List<InternalRequest> emptyInternalList;
4418
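    // Illustrative summary of the HDR snapshot path below: the single framework request
    // is expanded into a bracketed sequence of internal captures at different exposure
    // compensation values (metering-only settling frames followed by frames that also
    // request metadata), with the framework frame number attached to one of them, and
    // the original settings pointer restored at the end.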
4419 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4420 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4421 uint32_t internalFrameNumber;
4422 CameraMetadata modified_meta;
4423
4424
4425 /* Add Blob channel to list of internally requested streams */
4426 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4427 if (request->output_buffers[i].stream->format
4428 == HAL_PIXEL_FORMAT_BLOB) {
4429 InternalRequest streamRequested;
4430 streamRequested.meteringOnly = 1;
4431 streamRequested.need_metadata = 0;
4432 streamRequested.stream = request->output_buffers[i].stream;
4433 internallyRequestedStreams.push_back(streamRequested);
4434 }
4435 }
4436 request->num_output_buffers = 0;
4437 auto itr = internallyRequestedStreams.begin();
4438
4439 /* Modify setting to set compensation */
4440 modified_meta = request->settings;
4441 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4442 uint8_t aeLock = 1;
4443 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4444 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4445 camera_metadata_t *modified_settings = modified_meta.release();
4446 request->settings = modified_settings;
4447
4448 /* Capture Settling & -2x frame */
4449 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4450 request->frame_number = internalFrameNumber;
4451 processCaptureRequest(request, internallyRequestedStreams);
4452
4453 request->num_output_buffers = originalOutputCount;
4454 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4455 request->frame_number = internalFrameNumber;
4456 processCaptureRequest(request, emptyInternalList);
4457 request->num_output_buffers = 0;
4458
4459 modified_meta = modified_settings;
4460 expCompensation = 0;
4461 aeLock = 1;
4462 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4463 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4464 modified_settings = modified_meta.release();
4465 request->settings = modified_settings;
4466
4467 /* Capture Settling & 0X frame */
4468
4469 itr = internallyRequestedStreams.begin();
4470 if (itr == internallyRequestedStreams.end()) {
4471 LOGE("Error Internally Requested Stream list is empty");
4472 assert(0);
4473 } else {
4474 itr->need_metadata = 0;
4475 itr->meteringOnly = 1;
4476 }
4477
4478 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4479 request->frame_number = internalFrameNumber;
4480 processCaptureRequest(request, internallyRequestedStreams);
4481
4482 itr = internallyRequestedStreams.begin();
4483 if (itr == internallyRequestedStreams.end()) {
4484 ALOGE("Error Internally Requested Stream list is empty");
4485 assert(0);
4486 } else {
4487 itr->need_metadata = 1;
4488 itr->meteringOnly = 0;
4489 }
4490
4491 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4492 request->frame_number = internalFrameNumber;
4493 processCaptureRequest(request, internallyRequestedStreams);
4494
4495 /* Capture 2X frame*/
4496 modified_meta = modified_settings;
4497 expCompensation = GB_HDR_2X_STEP_EV;
4498 aeLock = 1;
4499 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4500 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4501 modified_settings = modified_meta.release();
4502 request->settings = modified_settings;
4503
4504 itr = internallyRequestedStreams.begin();
4505 if (itr == internallyRequestedStreams.end()) {
4506 ALOGE("Error Internally Requested Stream list is empty");
4507 assert(0);
4508 } else {
4509 itr->need_metadata = 0;
4510 itr->meteringOnly = 1;
4511 }
4512 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4513 request->frame_number = internalFrameNumber;
4514 processCaptureRequest(request, internallyRequestedStreams);
4515
4516 itr = internallyRequestedStreams.begin();
4517 if (itr == internallyRequestedStreams.end()) {
4518 ALOGE("Error Internally Requested Stream list is empty");
4519 assert(0);
4520 } else {
4521 itr->need_metadata = 1;
4522 itr->meteringOnly = 0;
4523 }
4524
4525 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4526 request->frame_number = internalFrameNumber;
4527 processCaptureRequest(request, internallyRequestedStreams);
4528
4529
4530 /* Capture 2X on original streaming config*/
4531 internallyRequestedStreams.clear();
4532
4533 /* Restore original settings pointer */
4534 request->settings = original_settings;
4535 } else {
4536 uint32_t internalFrameNumber;
4537 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4538 request->frame_number = internalFrameNumber;
4539 return processCaptureRequest(request, internallyRequestedStreams);
4540 }
4541
4542 return NO_ERROR;
4543}
4544
4545/*===========================================================================
4546 * FUNCTION : orchestrateResult
4547 *
4548 * DESCRIPTION: Orchestrates a capture result to camera service
4549 *
4550 * PARAMETERS :
4551  *   @result  : capture result to send to the camera service
4552 *
4553 * RETURN :
4554 *
4555 *==========================================================================*/
4556void QCamera3HardwareInterface::orchestrateResult(
4557 camera3_capture_result_t *result)
4558{
4559 uint32_t frameworkFrameNumber;
4560 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4561 frameworkFrameNumber);
4562 if (rc != NO_ERROR) {
4563 LOGE("Cannot find translated frameworkFrameNumber");
4564 assert(0);
4565 } else {
4566 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004567 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004568 } else {
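            // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite it to the
            // framework-visible frame number before forwarding the result.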
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004569 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004570 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4571 camera_metadata_entry_t entry;
4572 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4573 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004574 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004575 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4576 if (ret != OK)
4577 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004578 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004579 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580 result->frame_number = frameworkFrameNumber;
4581 mCallbackOps->process_capture_result(mCallbackOps, result);
4582 }
4583 }
4584}
4585
4586/*===========================================================================
4587 * FUNCTION : orchestrateNotify
4588 *
4589 * DESCRIPTION: Orchestrates a notify to camera service
4590 *
4591 * PARAMETERS :
4592  *   @notify_msg : notify message to send to the camera service
4593 *
4594 * RETURN :
4595 *
4596 *==========================================================================*/
4597void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4598{
4599 uint32_t frameworkFrameNumber;
4600 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004601 int32_t rc = NO_ERROR;
4602
4603 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004605
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004606 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004607 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4608 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4609 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004610 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004611 LOGE("Cannot find translated frameworkFrameNumber");
4612 assert(0);
4613 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004614 }
4615 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004616
4617 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4618 LOGD("Internal Request drop the notifyCb");
4619 } else {
4620 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4621 mCallbackOps->notify(mCallbackOps, notify_msg);
4622 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004623}
4624
4625/*===========================================================================
4626 * FUNCTION : FrameNumberRegistry
4627 *
4628 * DESCRIPTION: Constructor
4629 *
4630 * PARAMETERS :
4631 *
4632 * RETURN :
4633 *
4634 *==========================================================================*/
4635FrameNumberRegistry::FrameNumberRegistry()
4636{
4637 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4638}
4639
4640/*===========================================================================
4641 * FUNCTION : ~FrameNumberRegistry
4642 *
4643 * DESCRIPTION: Destructor
4644 *
4645 * PARAMETERS :
4646 *
4647 * RETURN :
4648 *
4649 *==========================================================================*/
4650FrameNumberRegistry::~FrameNumberRegistry()
4651{
4652}
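// Illustrative flow: orchestrateRequest() obtains internal frame numbers via
// allocStoreInternalFrameNumber()/generateStoreInternalFrameNumber() before calling
// processCaptureRequest(), while orchestrateResult() and orchestrateNotify() translate
// back with getFrameworkFrameNumber() before forwarding to the framework. Entries older
// than FRAME_REGISTER_LRU_SIZE are purged on each registry access.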
4653
4654/*===========================================================================
4655 * FUNCTION : PurgeOldEntriesLocked
4656 *
4657  * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4658 *
4659 * PARAMETERS :
4660 *
4661 * RETURN : NONE
4662 *
4663 *==========================================================================*/
4664void FrameNumberRegistry::purgeOldEntriesLocked()
4665{
4666 while (_register.begin() != _register.end()) {
4667 auto itr = _register.begin();
4668 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4669 _register.erase(itr);
4670 } else {
4671 return;
4672 }
4673 }
4674}
4675
4676/*===========================================================================
4677 * FUNCTION : allocStoreInternalFrameNumber
4678 *
4679 * DESCRIPTION: Method to note down a framework request and associate a new
4680  *              internal request number with it
4681 *
4682 * PARAMETERS :
4683 * @fFrameNumber: Identifier given by framework
4684 * @internalFN : Output parameter which will have the newly generated internal
4685 * entry
4686 *
4687 * RETURN : Error code
4688 *
4689 *==========================================================================*/
4690int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4691 uint32_t &internalFrameNumber)
4692{
4693 Mutex::Autolock lock(mRegistryLock);
4694 internalFrameNumber = _nextFreeInternalNumber++;
4695 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4696 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4697 purgeOldEntriesLocked();
4698 return NO_ERROR;
4699}
4700
4701/*===========================================================================
4702 * FUNCTION : generateStoreInternalFrameNumber
4703 *
4704 * DESCRIPTION: Method to associate a new internal request number independent
4705  *              of any association with framework requests
4706 *
4707 * PARAMETERS :
4708  *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4709 *
4710 *
4711 * RETURN : Error code
4712 *
4713 *==========================================================================*/
4714int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4715{
4716 Mutex::Autolock lock(mRegistryLock);
4717 internalFrameNumber = _nextFreeInternalNumber++;
4718 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4719 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4720 purgeOldEntriesLocked();
4721 return NO_ERROR;
4722}
4723
4724/*===========================================================================
4725 * FUNCTION : getFrameworkFrameNumber
4726 *
4727  * DESCRIPTION: Method to query the framework frame number given an internal #
4728 *
4729 * PARAMETERS :
4730 * @internalFrame#: Internal reference
4731 * @frameworkframenumber: Output parameter holding framework frame entry
4732 *
4733 * RETURN : Error code
4734 *
4735 *==========================================================================*/
4736int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4737 uint32_t &frameworkFrameNumber)
4738{
4739 Mutex::Autolock lock(mRegistryLock);
4740 auto itr = _register.find(internalFrameNumber);
4741 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004742 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004743 return -ENOENT;
4744 }
4745
4746 frameworkFrameNumber = itr->second;
4747 purgeOldEntriesLocked();
4748 return NO_ERROR;
4749}
Thierry Strudel3d639192016-09-09 11:52:26 -07004750
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004751status_t QCamera3HardwareInterface::fillPbStreamConfig(
4752 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4753 QCamera3Channel *channel, uint32_t streamIndex) {
4754 if (config == nullptr) {
4755 LOGE("%s: config is null", __FUNCTION__);
4756 return BAD_VALUE;
4757 }
4758
4759 if (channel == nullptr) {
4760 LOGE("%s: channel is null", __FUNCTION__);
4761 return BAD_VALUE;
4762 }
4763
4764 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4765 if (stream == nullptr) {
4766 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4767 return NAME_NOT_FOUND;
4768 }
4769
4770 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4771 if (streamInfo == nullptr) {
4772 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4773 return NAME_NOT_FOUND;
4774 }
4775
4776 config->id = pbStreamId;
4777 config->image.width = streamInfo->dim.width;
4778 config->image.height = streamInfo->dim.height;
4779 config->image.padding = 0;
4780 config->image.format = pbStreamFormat;
4781
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004782 uint32_t totalPlaneSize = 0;
4783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784 // Fill plane information.
4785 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4786 pbcamera::PlaneConfiguration plane;
4787 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4788 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4789 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004790
4791 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004792 }
4793
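    // Padding is whatever remains of the total frame length after the per-plane
    // sizes (stride * scanline, summed over all planes) are accounted for.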
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004794 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004795 return OK;
4796}
4797
Thierry Strudel3d639192016-09-09 11:52:26 -07004798/*===========================================================================
4799 * FUNCTION : processCaptureRequest
4800 *
4801 * DESCRIPTION: process a capture request from camera service
4802 *
4803 * PARAMETERS :
4804 * @request : request from framework to process
4805 *
4806 * RETURN :
4807 *
4808 *==========================================================================*/
4809int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004810 camera3_capture_request_t *request,
4811 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004812{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004813 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 int rc = NO_ERROR;
4815 int32_t request_id;
4816 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 bool isVidBufRequested = false;
4818 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004819 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004820
4821 pthread_mutex_lock(&mMutex);
4822
4823 // Validate current state
4824 switch (mState) {
4825 case CONFIGURED:
4826 case STARTED:
4827 /* valid state */
4828 break;
4829
4830 case ERROR:
4831 pthread_mutex_unlock(&mMutex);
4832 handleCameraDeviceError();
4833 return -ENODEV;
4834
4835 default:
4836 LOGE("Invalid state %d", mState);
4837 pthread_mutex_unlock(&mMutex);
4838 return -ENODEV;
4839 }
4840
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004841 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 if (rc != NO_ERROR) {
4843 LOGE("incoming request is not valid");
4844 pthread_mutex_unlock(&mMutex);
4845 return rc;
4846 }
4847
4848 meta = request->settings;
4849
4850 // For first capture request, send capture intent, and
4851 // stream on all streams
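    // Illustrative summary of the first-request setup below: unconfigure the backend if
    // this is not the first configuration, acquire the start-preview perf lock, derive the
    // EIS/IS types per stream, push session parameters (capture intent, instant AEC,
    // tintless, CDS, AV timer, FPS/HFR, scene mode, stream info), query the sensor mode,
    // initialize all channels, and set bundle and dual-camera link info.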
4852 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004853 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 // send an unconfigure to the backend so that the isp
4855 // resources are deallocated
4856 if (!mFirstConfiguration) {
4857 cam_stream_size_info_t stream_config_info;
4858 int32_t hal_version = CAM_HAL_V3;
4859 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4860 stream_config_info.buffer_info.min_buffers =
4861 MIN_INFLIGHT_REQUESTS;
4862 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004863 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004864 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 clear_metadata_buffer(mParameters);
4866 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4867 CAM_INTF_PARM_HAL_VERSION, hal_version);
4868 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4869 CAM_INTF_META_STREAM_INFO, stream_config_info);
4870 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4871 mParameters);
4872 if (rc < 0) {
4873 LOGE("set_parms for unconfigure failed");
4874 pthread_mutex_unlock(&mMutex);
4875 return rc;
4876 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004877
Thierry Strudel3d639192016-09-09 11:52:26 -07004878 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004879 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004880 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 property_get("persist.camera.is_type", is_type_value, "4");
4884 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4885 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4886 property_get("persist.camera.is_type_preview", is_type_value, "4");
4887 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4888 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004889
4890 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4891 int32_t hal_version = CAM_HAL_V3;
4892 uint8_t captureIntent =
4893 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4894 mCaptureIntent = captureIntent;
4895 clear_metadata_buffer(mParameters);
4896 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4897 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4898 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004899 if (mFirstConfiguration) {
4900 // configure instant AEC
4901 // Instant AEC is a session based parameter and it is needed only
4902 // once per complete session after open camera.
4903 // i.e. This is set only once for the first capture request, after open camera.
4904 setInstantAEC(meta);
4905 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906 uint8_t fwkVideoStabMode=0;
4907 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4908 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4909 }
4910
Xue Tuecac74e2017-04-17 13:58:15 -07004911         // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4912 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004913 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 int32_t vsMode;
4915 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4917 rc = BAD_VALUE;
4918 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004919 LOGD("setEis %d", setEis);
4920 bool eis3Supported = false;
4921 size_t count = IS_TYPE_MAX;
4922 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4923 for (size_t i = 0; i < count; i++) {
4924 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4925 eis3Supported = true;
4926 break;
4927 }
4928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004929
4930 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4933 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004934 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4935 is_type = isTypePreview;
4936 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4937 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4938 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 } else {
4941 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004943 } else {
4944 is_type = IS_TYPE_NONE;
4945 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004947 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004948 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4949 }
4950 }
4951
4952 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4953 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4954
Thierry Strudel54dc9782017-02-15 12:12:10 -08004955 //Disable tintless only if the property is set to 0
4956 memset(prop, 0, sizeof(prop));
4957 property_get("persist.camera.tintless.enable", prop, "1");
4958 int32_t tintless_value = atoi(prop);
4959
Thierry Strudel3d639192016-09-09 11:52:26 -07004960 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4961 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004962
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 //Disable CDS for HFR mode or if DIS/EIS is on.
4964 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4965 //after every configure_stream
4966 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4967 (m_bIsVideo)) {
4968 int32_t cds = CAM_CDS_MODE_OFF;
4969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4970 CAM_INTF_PARM_CDS_MODE, cds))
4971 LOGE("Failed to disable CDS for HFR mode");
4972
4973 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004974
4975 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4976 uint8_t* use_av_timer = NULL;
4977
4978 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004979 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 use_av_timer = &m_debug_avtimer;
4981 }
4982 else{
4983 use_av_timer =
4984 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004985 if (use_av_timer) {
4986 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4987 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004988 }
4989
4990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4991 rc = BAD_VALUE;
4992 }
4993 }
4994
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 setMobicat();
4996
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004997 uint8_t nrMode = 0;
4998 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4999 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5000 }
5001
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 /* Set fps and hfr mode while sending meta stream info so that sensor
5003 * can configure appropriate streaming mode */
5004 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5006 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5008 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009 if (rc == NO_ERROR) {
5010 int32_t max_fps =
5011 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005012 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005013 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5014 }
5015 /* For HFR, more buffers are dequeued upfront to improve the performance */
5016 if (mBatchSize) {
5017 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5018 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5019 }
5020 }
5021 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005022 LOGE("setHalFpsRange failed");
5023 }
5024 }
5025 if (meta.exists(ANDROID_CONTROL_MODE)) {
5026 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5027 rc = extractSceneMode(meta, metaMode, mParameters);
5028 if (rc != NO_ERROR) {
5029 LOGE("extractSceneMode failed");
5030 }
5031 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005032 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005033
Thierry Strudel04e026f2016-10-10 11:27:36 -07005034 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5035 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5036 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5037 rc = setVideoHdrMode(mParameters, vhdr);
5038 if (rc != NO_ERROR) {
5039 LOGE("setVideoHDR is failed");
5040 }
5041 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005042
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005043 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005044 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005045 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005046 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5048 sensorModeFullFov)) {
5049 rc = BAD_VALUE;
5050 }
5051 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005052 //TODO: validate the arguments, HSV scenemode should have only the
5053 //advertised fps ranges
5054
5055         /* Set the capture intent, HAL version, tintless, stream info,
5056          * and DIS enable parameters to the backend */
5057 LOGD("set_parms META_STREAM_INFO " );
5058 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005059 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5060 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005061 mStreamConfigInfo.type[i],
5062 mStreamConfigInfo.stream_sizes[i].width,
5063 mStreamConfigInfo.stream_sizes[i].height,
5064 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 mStreamConfigInfo.format[i],
5066 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005067 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005068
Thierry Strudel3d639192016-09-09 11:52:26 -07005069 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5070 mParameters);
5071 if (rc < 0) {
5072 LOGE("set_parms failed for hal version, stream info");
5073 }
5074
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005075 cam_sensor_mode_info_t sensorModeInfo = {};
5076 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 if (rc != NO_ERROR) {
5078 LOGE("Failed to get sensor output size");
5079 pthread_mutex_unlock(&mMutex);
5080 goto error_exit;
5081 }
5082
5083 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5084 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005085 sensorModeInfo.active_array_size.width,
5086 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005087
5088 /* Set batchmode before initializing channel. Since registerBuffer
5089 * internally initializes some of the channels, better set batchmode
5090 * even before first register buffer */
5091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5092 it != mStreamInfo.end(); it++) {
5093 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5094 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5095 && mBatchSize) {
5096 rc = channel->setBatchSize(mBatchSize);
5097 //Disable per frame map unmap for HFR/batchmode case
5098 rc |= channel->setPerFrameMapUnmap(false);
5099 if (NO_ERROR != rc) {
5100 LOGE("Channel init failed %d", rc);
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105 }
5106
5107 //First initialize all streams
5108 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5109 it != mStreamInfo.end(); it++) {
5110 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005111
5112 /* Initial value of NR mode is needed before stream on */
5113 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5115 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 setEis) {
5117 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5118 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5119 is_type = mStreamConfigInfo.is_type[i];
5120 break;
5121 }
5122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005124 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005125 rc = channel->initialize(IS_TYPE_NONE);
5126 }
5127 if (NO_ERROR != rc) {
5128 LOGE("Channel initialization failed %d", rc);
5129 pthread_mutex_unlock(&mMutex);
5130 goto error_exit;
5131 }
5132 }
5133
5134 if (mRawDumpChannel) {
5135 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5136 if (rc != NO_ERROR) {
5137 LOGE("Error: Raw Dump Channel init failed");
5138 pthread_mutex_unlock(&mMutex);
5139 goto error_exit;
5140 }
5141 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005142 if (mHdrPlusRawSrcChannel) {
5143 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5144 if (rc != NO_ERROR) {
5145 LOGE("Error: HDR+ RAW Source Channel init failed");
5146 pthread_mutex_unlock(&mMutex);
5147 goto error_exit;
5148 }
5149 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 if (mSupportChannel) {
5151 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5152 if (rc < 0) {
5153 LOGE("Support channel initialization failed");
5154 pthread_mutex_unlock(&mMutex);
5155 goto error_exit;
5156 }
5157 }
5158 if (mAnalysisChannel) {
5159 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5160 if (rc < 0) {
5161 LOGE("Analysis channel initialization failed");
5162 pthread_mutex_unlock(&mMutex);
5163 goto error_exit;
5164 }
5165 }
5166 if (mDummyBatchChannel) {
5167 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5168 if (rc < 0) {
5169 LOGE("mDummyBatchChannel setBatchSize failed");
5170 pthread_mutex_unlock(&mMutex);
5171 goto error_exit;
5172 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005173 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (rc < 0) {
5175 LOGE("mDummyBatchChannel initialization failed");
5176 pthread_mutex_unlock(&mMutex);
5177 goto error_exit;
5178 }
5179 }
5180
5181 // Set bundle info
5182 rc = setBundleInfo();
5183 if (rc < 0) {
5184 LOGE("setBundleInfo failed %d", rc);
5185 pthread_mutex_unlock(&mMutex);
5186 goto error_exit;
5187 }
5188
5189 //update settings from app here
5190 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5191 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5192 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5193 }
5194 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5195 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5196 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5197 }
5198 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5199 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5200 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5201
5202 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5203 (mLinkedCameraId != mCameraId) ) {
5204 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5205 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005206 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 goto error_exit;
5208 }
5209 }
5210
5211 // add bundle related cameras
5212 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
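    // Populate the dual-camera bundle info (sync control, role, mode, 3A sync
    // and the linked session id) and send it to the backend via set_dual_cam_cmd.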
5213 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005214 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5215 &m_pDualCamCmdPtr->bundle_info;
5216 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 if (mIsDeviceLinked)
5218 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5219 else
5220 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5221
5222 pthread_mutex_lock(&gCamLock);
5223
5224 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5225 LOGE("Dualcam: Invalid Session Id ");
5226 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005227 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 goto error_exit;
5229 }
5230
5231 if (mIsMainCamera == 1) {
5232 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5233 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005234 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005235 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236 // related session id should be session id of linked session
5237 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5238 } else {
5239 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5240 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005241 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005242 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5244 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005245 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005246 pthread_mutex_unlock(&gCamLock);
5247
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005248 rc = mCameraHandle->ops->set_dual_cam_cmd(
5249 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 if (rc < 0) {
5251 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005252 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 goto error_exit;
5254 }
5255 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 goto no_error;
5257error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005258 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 return rc;
5260no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 mWokenUpByDaemon = false;
5262 mPendingLiveRequest = 0;
5263 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005264 }
5265
5266 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005267 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268
5269 if (mFlushPerf) {
5270 //we cannot accept any requests during flush
5271 LOGE("process_capture_request cannot proceed during flush");
5272 pthread_mutex_unlock(&mMutex);
5273 return NO_ERROR; //should return an error
5274 }
5275
5276 if (meta.exists(ANDROID_REQUEST_ID)) {
5277 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5278 mCurrentRequestId = request_id;
5279 LOGD("Received request with id: %d", request_id);
5280 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5281 LOGE("Unable to find request id field, \
5282 & no previous id available");
5283 pthread_mutex_unlock(&mMutex);
5284 return NAME_NOT_FOUND;
5285 } else {
5286 LOGD("Re-using old request id");
5287 request_id = mCurrentRequestId;
5288 }
5289
5290 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5291 request->num_output_buffers,
5292 request->input_buffer,
5293 frameNumber);
5294 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005295 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005296 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005297 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 uint32_t snapshotStreamId = 0;
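    // Walk the output buffers: wait on acquire fences, note whether this is a
    // blob (JPEG) request, and collect the backend stream IDs to request.
    // Depth buffers (BLOB format with HAL_DATASPACE_DEPTH) are handled separately.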
5299 for (size_t i = 0; i < request->num_output_buffers; i++) {
5300 const camera3_stream_buffer_t& output = request->output_buffers[i];
5301 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5302
Emilian Peev7650c122017-01-19 08:24:33 -08005303 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5304 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005305 //FIXME??: Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 blob_request = 1;
5307 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5308 }
5309
5310 if (output.acquire_fence != -1) {
5311 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5312 close(output.acquire_fence);
5313 if (rc != OK) {
5314 LOGE("sync wait failed %d", rc);
5315 pthread_mutex_unlock(&mMutex);
5316 return rc;
5317 }
5318 }
5319
Emilian Peev0f3c3162017-03-15 12:57:46 +00005320 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5321 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005322 depthRequestPresent = true;
5323 continue;
5324 }
5325
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005326 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005328
5329 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5330 isVidBufRequested = true;
5331 }
5332 }
5333
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005334 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5335 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5336 itr++) {
5337 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5338 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5339 channel->getStreamID(channel->getStreamTypeMask());
5340
5341 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5342 isVidBufRequested = true;
5343 }
5344 }
5345
Thierry Strudel3d639192016-09-09 11:52:26 -07005346 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005347 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005348 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 }
5350 if (blob_request && mRawDumpChannel) {
5351 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005352 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005353 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005354 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005355 }
5356
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005357 {
5358 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5359 // Request a RAW buffer if
5360 // 1. mHdrPlusRawSrcChannel is valid.
5361 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5362 // 3. There is no pending HDR+ request.
5363 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5364 mHdrPlusPendingRequests.size() == 0) {
5365 streamsArray.stream_request[streamsArray.num_streams].streamID =
5366 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5367 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5368 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005369 }
5370
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005371 //extract capture intent
5372 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5373 mCaptureIntent =
5374 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5375 }
5376
5377 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5378 mCacMode =
5379 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5380 }
5381
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005382 uint8_t requestedLensShadingMapMode;
5383 // Get the shading map mode.
5384 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5385 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5386 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5387 } else {
5388 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5389 }
5390
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005391 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005392 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005393
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005394 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005395 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005396 // If this request has a still capture intent, try to submit an HDR+ request.
5397 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5398 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5399 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5400 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005401 }
5402
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005403 if (hdrPlusRequest) {
5404 // For a HDR+ request, just set the frame parameters.
5405 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5406 if (rc < 0) {
5407 LOGE("fail to set frame parameters");
5408 pthread_mutex_unlock(&mMutex);
5409 return rc;
5410 }
5411 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 /* Parse the settings:
5413 * - For every request in NORMAL MODE
5414 * - For every request in HFR mode during preview only case
5415 * - For first request of every batch in HFR mode during video
5416 * recording. In batch mode the same settings, except the frame
5417 * number, are repeated in each request of the batch.
5418 */
5419 if (!mBatchSize ||
5420 (mBatchSize && !isVidBufRequested) ||
5421 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005422 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (rc < 0) {
5424 LOGE("fail to set frame parameters");
5425 pthread_mutex_unlock(&mMutex);
5426 return rc;
5427 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005428
5429 {
5430 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5431 // will be reported in result metadata.
5432 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5433 if (mHdrPlusModeEnabled) {
5434 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5435 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5436 }
5437 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005438 }
5439 /* For batchMode HFR, setFrameParameters is not called for every
5440 * request; only the frame number of the latest request is parsed.
5441 * Keep track of the first and last frame numbers in a batch so that
5442 * metadata for all frame numbers of the batch can be duplicated in
5443 * handleBatchMetadata */
5444 if (mBatchSize) {
5445 if (!mToBeQueuedVidBufs) {
5446 //start of the batch
5447 mFirstFrameNumberInBatch = request->frame_number;
5448 }
5449 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5450 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5451 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005452 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 return BAD_VALUE;
5454 }
5455 }
5456 if (mNeedSensorRestart) {
5457 /* Unlock the mutex as restartSensor waits on the channels to be
5458 * stopped, which in turn calls stream callback functions -
5459 * handleBufferWithLock and handleMetadataWithLock */
5460 pthread_mutex_unlock(&mMutex);
5461 rc = dynamicUpdateMetaStreamInfo();
5462 if (rc != NO_ERROR) {
5463 LOGE("Restarting the sensor failed");
5464 return BAD_VALUE;
5465 }
5466 mNeedSensorRestart = false;
5467 pthread_mutex_lock(&mMutex);
5468 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005469 if(mResetInstantAEC) {
5470 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5471 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5472 mResetInstantAEC = false;
5473 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005474 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005475 if (request->input_buffer->acquire_fence != -1) {
5476 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5477 close(request->input_buffer->acquire_fence);
5478 if (rc != OK) {
5479 LOGE("input buffer sync wait failed %d", rc);
5480 pthread_mutex_unlock(&mMutex);
5481 return rc;
5482 }
5483 }
5484 }
5485
5486 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5487 mLastCustIntentFrmNum = frameNumber;
5488 }
5489 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005491 pendingRequestIterator latestRequest;
5492 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005493 pendingRequest.num_buffers = depthRequestPresent ?
5494 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005495 pendingRequest.request_id = request_id;
5496 pendingRequest.blob_request = blob_request;
5497 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005498 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005499 if (request->input_buffer) {
5500 pendingRequest.input_buffer =
5501 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5502 *(pendingRequest.input_buffer) = *(request->input_buffer);
5503 pInputBuffer = pendingRequest.input_buffer;
5504 } else {
5505 pendingRequest.input_buffer = NULL;
5506 pInputBuffer = NULL;
5507 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005508 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005509
5510 pendingRequest.pipeline_depth = 0;
5511 pendingRequest.partial_result_cnt = 0;
5512 extractJpegMetadata(mCurJpegMeta, request);
5513 pendingRequest.jpegMetadata = mCurJpegMeta;
5514 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005515 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005516 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005517 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005518 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5519 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005520
Samuel Ha68ba5172016-12-15 18:41:12 -08005521 /* DevCamDebug metadata processCaptureRequest */
5522 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5523 mDevCamDebugMetaEnable =
5524 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5525 }
5526 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5527 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005528
5529 //extract CAC info
5530 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5531 mCacMode =
5532 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5533 }
5534 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005535 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005536 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5537 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005538
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005539 // extract enableZsl info
5540 if (gExposeEnableZslKey) {
5541 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5542 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5543 mZslEnabled = pendingRequest.enableZsl;
5544 } else {
5545 pendingRequest.enableZsl = mZslEnabled;
5546 }
5547 }
5548
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 PendingBuffersInRequest bufsForCurRequest;
5550 bufsForCurRequest.frame_number = frameNumber;
5551 // Mark current timestamp for the new request
5552 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005555 if (hdrPlusRequest) {
5556 // Save settings for this request.
5557 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5558 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5559
5560 // Add to pending HDR+ request queue.
5561 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5562 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5563
5564 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5565 }
5566
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005568 if ((request->output_buffers[i].stream->data_space ==
5569 HAL_DATASPACE_DEPTH) &&
5570 (HAL_PIXEL_FORMAT_BLOB ==
5571 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005572 continue;
5573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005574 RequestedBufferInfo requestedBuf;
5575 memset(&requestedBuf, 0, sizeof(requestedBuf));
5576 requestedBuf.stream = request->output_buffers[i].stream;
5577 requestedBuf.buffer = NULL;
5578 pendingRequest.buffers.push_back(requestedBuf);
5579
5580 // Add to buffer handle the pending buffers list
5581 PendingBufferInfo bufferInfo;
5582 bufferInfo.buffer = request->output_buffers[i].buffer;
5583 bufferInfo.stream = request->output_buffers[i].stream;
5584 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5585 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5586 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5587 frameNumber, bufferInfo.buffer,
5588 channel->getStreamTypeMask(), bufferInfo.stream->format);
5589 }
5590 // Add this request packet into mPendingBuffersMap
5591 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5592 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5593 mPendingBuffersMap.get_num_overall_buffers());
5594
5595 latestRequest = mPendingRequestsList.insert(
5596 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005597
5598 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5599 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005600 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005601 for (size_t i = 0; i < request->num_output_buffers; i++) {
5602 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5603 }
5604
Thierry Strudel3d639192016-09-09 11:52:26 -07005605 if(mFlush) {
5606 LOGI("mFlush is true");
5607 pthread_mutex_unlock(&mMutex);
5608 return NO_ERROR;
5609 }
5610
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005611 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5612 // channel.
5613 if (!hdrPlusRequest) {
5614 int indexUsed;
5615 // Notify metadata channel we receive a request
5616 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618 if(request->input_buffer != NULL){
5619 LOGD("Input request, frame_number %d", frameNumber);
5620 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5621 if (NO_ERROR != rc) {
5622 LOGE("fail to set reproc parameters");
5623 pthread_mutex_unlock(&mMutex);
5624 return rc;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 }
5627
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005628 // Call request on other streams
5629 uint32_t streams_need_metadata = 0;
5630 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5631 for (size_t i = 0; i < request->num_output_buffers; i++) {
5632 const camera3_stream_buffer_t& output = request->output_buffers[i];
5633 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5634
5635 if (channel == NULL) {
5636 LOGW("invalid channel pointer for stream");
5637 continue;
5638 }
5639
5640 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5641 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5642 output.buffer, request->input_buffer, frameNumber);
5643 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005645 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5646 if (rc < 0) {
5647 LOGE("Fail to request on picture channel");
5648 pthread_mutex_unlock(&mMutex);
5649 return rc;
5650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005652 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5653 assert(NULL != mDepthChannel);
5654 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005655
Emilian Peev7650c122017-01-19 08:24:33 -08005656 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5657 if (rc < 0) {
5658 LOGE("Fail to map on depth buffer");
5659 pthread_mutex_unlock(&mMutex);
5660 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005661 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005662 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005663 } else {
5664 LOGD("snapshot request with buffer %p, frame_number %d",
5665 output.buffer, frameNumber);
5666 if (!request->settings) {
5667 rc = channel->request(output.buffer, frameNumber,
5668 NULL, mPrevParameters, indexUsed);
5669 } else {
5670 rc = channel->request(output.buffer, frameNumber,
5671 NULL, mParameters, indexUsed);
5672 }
5673 if (rc < 0) {
5674 LOGE("Fail to request on picture channel");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005678
Emilian Peev7650c122017-01-19 08:24:33 -08005679 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5680 uint32_t j = 0;
5681 for (j = 0; j < streamsArray.num_streams; j++) {
5682 if (streamsArray.stream_request[j].streamID == streamId) {
5683 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5684 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5685 else
5686 streamsArray.stream_request[j].buf_index = indexUsed;
5687 break;
5688 }
5689 }
5690 if (j == streamsArray.num_streams) {
5691 LOGE("Did not find matching stream to update index");
5692 assert(0);
5693 }
5694
5695 pendingBufferIter->need_metadata = true;
5696 streams_need_metadata++;
5697 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005698 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005699 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5700 bool needMetadata = false;
5701 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5702 rc = yuvChannel->request(output.buffer, frameNumber,
5703 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5704 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005705 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005706 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005710
5711 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5712 uint32_t j = 0;
5713 for (j = 0; j < streamsArray.num_streams; j++) {
5714 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005715 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5716 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5717 else
5718 streamsArray.stream_request[j].buf_index = indexUsed;
5719 break;
5720 }
5721 }
5722 if (j == streamsArray.num_streams) {
5723 LOGE("Did not find matching stream to update index");
5724 assert(0);
5725 }
5726
5727 pendingBufferIter->need_metadata = needMetadata;
5728 if (needMetadata)
5729 streams_need_metadata += 1;
5730 LOGD("calling YUV channel request, need_metadata is %d",
5731 needMetadata);
5732 } else {
5733 LOGD("request with buffer %p, frame_number %d",
5734 output.buffer, frameNumber);
5735
5736 rc = channel->request(output.buffer, frameNumber, indexUsed);
5737
5738 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5739 uint32_t j = 0;
5740 for (j = 0; j < streamsArray.num_streams; j++) {
5741 if (streamsArray.stream_request[j].streamID == streamId) {
5742 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5743 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5744 else
5745 streamsArray.stream_request[j].buf_index = indexUsed;
5746 break;
5747 }
5748 }
5749 if (j == streamsArray.num_streams) {
5750 LOGE("Did not find matching stream to update index");
5751 assert(0);
5752 }
5753
5754 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5755 && mBatchSize) {
5756 mToBeQueuedVidBufs++;
5757 if (mToBeQueuedVidBufs == mBatchSize) {
5758 channel->queueBatchBuf();
5759 }
5760 }
5761 if (rc < 0) {
5762 LOGE("request failed");
5763 pthread_mutex_unlock(&mMutex);
5764 return rc;
5765 }
5766 }
5767 pendingBufferIter++;
5768 }
5769
5770 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5771 itr++) {
5772 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5773
5774 if (channel == NULL) {
5775 LOGE("invalid channel pointer for stream");
5776 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005777 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 return BAD_VALUE;
5779 }
5780
5781 InternalRequest requestedStream;
5782 requestedStream = (*itr);
5783
5784
5785 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5786 LOGD("snapshot request internally input buffer %p, frame_number %d",
5787 request->input_buffer, frameNumber);
5788 if(request->input_buffer != NULL){
5789 rc = channel->request(NULL, frameNumber,
5790 pInputBuffer, &mReprocMeta, indexUsed, true,
5791 requestedStream.meteringOnly);
5792 if (rc < 0) {
5793 LOGE("Fail to request on picture channel");
5794 pthread_mutex_unlock(&mMutex);
5795 return rc;
5796 }
5797 } else {
5798 LOGD("snapshot request with frame_number %d", frameNumber);
5799 if (!request->settings) {
5800 rc = channel->request(NULL, frameNumber,
5801 NULL, mPrevParameters, indexUsed, true,
5802 requestedStream.meteringOnly);
5803 } else {
5804 rc = channel->request(NULL, frameNumber,
5805 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5806 }
5807 if (rc < 0) {
5808 LOGE("Fail to request on picture channel");
5809 pthread_mutex_unlock(&mMutex);
5810 return rc;
5811 }
5812
5813 if ((*itr).meteringOnly != 1) {
5814 requestedStream.need_metadata = 1;
5815 streams_need_metadata++;
5816 }
5817 }
5818
5819 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5820 uint32_t j = 0;
5821 for (j = 0; j < streamsArray.num_streams; j++) {
5822 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005823 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5824 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5825 else
5826 streamsArray.stream_request[j].buf_index = indexUsed;
5827 break;
5828 }
5829 }
5830 if (j == streamsArray.num_streams) {
5831 LOGE("Did not find matching stream to update index");
5832 assert(0);
5833 }
5834
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005835 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005837 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005838 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005839 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005840 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005841 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005842 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005843
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005844 //If 2 streams have need_metadata set to true, fail the request, unless
5845 //we copy/reference count the metadata buffer
5846 if (streams_need_metadata > 1) {
5847 LOGE("not supporting request in which two streams require"
5848 " 2 HAL metadata for reprocessing");
5849 pthread_mutex_unlock(&mMutex);
5850 return -EINVAL;
5851 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005852
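    /* Decide how PDAF data is delivered for this request: when a depth channel
     * is configured it is skipped by default (disabled when there is no depth
     * channel). If this request carries a depth buffer, honor the per-request
     * NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE setting when present, otherwise
     * fall back to the last requested depth cloud mode. */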
Emilian Peev656e4fa2017-06-02 16:47:04 +01005853 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5854 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5855 if (depthRequestPresent && mDepthChannel) {
5856 if (request->settings) {
5857 camera_metadata_ro_entry entry;
5858 if (find_camera_metadata_ro_entry(request->settings,
5859 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5860 if (entry.data.u8[0]) {
5861 pdafEnable = CAM_PD_DATA_ENABLED;
5862 } else {
5863 pdafEnable = CAM_PD_DATA_SKIP;
5864 }
5865 mDepthCloudMode = pdafEnable;
5866 } else {
5867 pdafEnable = mDepthCloudMode;
5868 }
5869 } else {
5870 pdafEnable = mDepthCloudMode;
5871 }
5872 }
5873
Emilian Peev7650c122017-01-19 08:24:33 -08005874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5875 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5876 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5877 pthread_mutex_unlock(&mMutex);
5878 return BAD_VALUE;
5879 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005880
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005881 if (request->input_buffer == NULL) {
5882 /* Set the parameters to backend:
5883 * - For every request in NORMAL MODE
5884 * - For every request in HFR mode during preview only case
5885 * - Once every batch in HFR mode during video recording
5886 */
5887 if (!mBatchSize ||
5888 (mBatchSize && !isVidBufRequested) ||
5889 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5890 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5891 mBatchSize, isVidBufRequested,
5892 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005893
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5895 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5896 uint32_t m = 0;
5897 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5898 if (streamsArray.stream_request[k].streamID ==
5899 mBatchedStreamsArray.stream_request[m].streamID)
5900 break;
5901 }
5902 if (m == mBatchedStreamsArray.num_streams) {
5903 mBatchedStreamsArray.stream_request\
5904 [mBatchedStreamsArray.num_streams].streamID =
5905 streamsArray.stream_request[k].streamID;
5906 mBatchedStreamsArray.stream_request\
5907 [mBatchedStreamsArray.num_streams].buf_index =
5908 streamsArray.stream_request[k].buf_index;
5909 mBatchedStreamsArray.num_streams =
5910 mBatchedStreamsArray.num_streams + 1;
5911 }
5912 }
5913 streamsArray = mBatchedStreamsArray;
5914 }
5915 /* Update stream id of all the requested buffers */
5916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5917 streamsArray)) {
5918 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005919 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005920 return BAD_VALUE;
5921 }
5922
5923 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5924 mParameters);
5925 if (rc < 0) {
5926 LOGE("set_parms failed");
5927 }
5928 /* reset to zero because the batch is queued */
5929 mToBeQueuedVidBufs = 0;
5930 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5931 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5932 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
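            /* Mid-batch request: only accumulate the requested stream IDs in
             * mBatchedStreamsArray; set_parms is deferred until the batch completes. */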
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005933 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5934 uint32_t m = 0;
5935 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5936 if (streamsArray.stream_request[k].streamID ==
5937 mBatchedStreamsArray.stream_request[m].streamID)
5938 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005939 }
5940 if (m == mBatchedStreamsArray.num_streams) {
5941 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5942 streamID = streamsArray.stream_request[k].streamID;
5943 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5944 buf_index = streamsArray.stream_request[k].buf_index;
5945 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5946 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005947 }
5948 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005949 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005950
5951 // Start all streams after the first setting is sent, so that the
5952 // setting can be applied sooner: (0 + apply_delay)th frame.
5953 if (mState == CONFIGURED && mChannelHandle) {
5954 //Then start them.
5955 LOGH("Start META Channel");
5956 rc = mMetadataChannel->start();
5957 if (rc < 0) {
5958 LOGE("META channel start failed");
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
5962
5963 if (mAnalysisChannel) {
5964 rc = mAnalysisChannel->start();
5965 if (rc < 0) {
5966 LOGE("Analysis channel start failed");
5967 mMetadataChannel->stop();
5968 pthread_mutex_unlock(&mMutex);
5969 return rc;
5970 }
5971 }
5972
5973 if (mSupportChannel) {
5974 rc = mSupportChannel->start();
5975 if (rc < 0) {
5976 LOGE("Support channel start failed");
5977 mMetadataChannel->stop();
5978 /* Although support and analysis are mutually exclusive today,
5979 adding it in any case for future proofing */
5980 if (mAnalysisChannel) {
5981 mAnalysisChannel->stop();
5982 }
5983 pthread_mutex_unlock(&mMutex);
5984 return rc;
5985 }
5986 }
5987 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5988 it != mStreamInfo.end(); it++) {
5989 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5990 LOGH("Start Processing Channel mask=%d",
5991 channel->getStreamTypeMask());
5992 rc = channel->start();
5993 if (rc < 0) {
5994 LOGE("channel start failed");
5995 pthread_mutex_unlock(&mMutex);
5996 return rc;
5997 }
5998 }
5999
6000 if (mRawDumpChannel) {
6001 LOGD("Starting raw dump stream");
6002 rc = mRawDumpChannel->start();
6003 if (rc != NO_ERROR) {
6004 LOGE("Error Starting Raw Dump Channel");
6005 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6006 it != mStreamInfo.end(); it++) {
6007 QCamera3Channel *channel =
6008 (QCamera3Channel *)(*it)->stream->priv;
6009 LOGH("Stopping Processing Channel mask=%d",
6010 channel->getStreamTypeMask());
6011 channel->stop();
6012 }
6013 if (mSupportChannel)
6014 mSupportChannel->stop();
6015 if (mAnalysisChannel) {
6016 mAnalysisChannel->stop();
6017 }
6018 mMetadataChannel->stop();
6019 pthread_mutex_unlock(&mMutex);
6020 return rc;
6021 }
6022 }
6023
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006024 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006025 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006026 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006027 if (rc != NO_ERROR) {
6028 LOGE("start_channel failed %d", rc);
6029 pthread_mutex_unlock(&mMutex);
6030 return rc;
6031 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006032
6033 {
6034 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006035 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006036
6037 // Now that sensor mode should have been selected, get the selected sensor mode
6038 // info.
6039 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6040 getCurrentSensorModeInfo(mSensorModeInfo);
6041
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006042 if (EaselManagerClientOpened) {
6043 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006044 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6045 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006046 if (rc != OK) {
6047 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6048 mCameraId, mSensorModeInfo.op_pixel_clk);
6049 pthread_mutex_unlock(&mMutex);
6050 return rc;
6051 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006052 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006053 }
6054 }
6055
6056 // Start sensor streaming.
6057 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6058 mChannelHandle);
6059 if (rc != NO_ERROR) {
6060 LOGE("start_sensor_stream_on failed %d", rc);
6061 pthread_mutex_unlock(&mMutex);
6062 return rc;
6063 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006064 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 }
6067
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006068 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006069 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006070 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006071 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006072 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6073 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6074 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6075 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006076
6077 if (isSessionHdrPlusModeCompatible()) {
6078 rc = enableHdrPlusModeLocked();
6079 if (rc != OK) {
6080 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006084 }
6085
6086 mFirstPreviewIntentSeen = true;
6087 }
6088 }
6089
Thierry Strudel3d639192016-09-09 11:52:26 -07006090 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6091
6092 mState = STARTED;
6093 // Added a timed condition wait
6094 struct timespec ts;
6095 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006096 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006097 if (rc < 0) {
6098 isValidTimeout = 0;
6099 LOGE("Error reading the real time clock!!");
6100 }
6101 else {
6102 // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006103 int64_t timeout = 5;
6104 {
6105 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6106 // If there is a pending HDR+ request, the following requests may be blocked until the
6107 // HDR+ request is done. So allow a longer timeout.
6108 if (mHdrPlusPendingRequests.size() > 0) {
6109 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6110 }
6111 }
6112 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006113 }
6114 //Block on conditional variable
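    // Throttle the caller: wait until the number of in-flight requests drops
    // below mMinInFlightRequests. Reprocess requests (pInputBuffer != NULL) are
    // not throttled, and the wait is abandoned on ERROR/DEINIT or on timeout.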
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006115 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006116 (mState != ERROR) && (mState != DEINIT)) {
6117 if (!isValidTimeout) {
6118 LOGD("Blocking on conditional wait");
6119 pthread_cond_wait(&mRequestCond, &mMutex);
6120 }
6121 else {
6122 LOGD("Blocking on timed conditional wait");
6123 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6124 if (rc == ETIMEDOUT) {
6125 rc = -ENODEV;
6126 LOGE("Unblocked on timeout!!!!");
6127 break;
6128 }
6129 }
6130 LOGD("Unblocked");
6131 if (mWokenUpByDaemon) {
6132 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006133 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 break;
6135 }
6136 }
6137 pthread_mutex_unlock(&mMutex);
6138
6139 return rc;
6140}
6141
6142/*===========================================================================
6143 * FUNCTION : dump
6144 *
6145 * DESCRIPTION: Dump HAL3 debug state (pending requests, pending buffers and
6146 * pending frame drops) to the given file descriptor
6147 *
6148 * PARAMETERS :
6149 * @fd : file descriptor to write the dump into
6150 * RETURN : NONE
6151 *==========================================================================*/
6152void QCamera3HardwareInterface::dump(int fd)
6153{
6154 pthread_mutex_lock(&mMutex);
6155 dprintf(fd, "\n Camera HAL3 information Begin \n");
6156
6157 dprintf(fd, "\nNumber of pending requests: %zu \n",
6158 mPendingRequestsList.size());
6159 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6160 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6161 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6162 for(pendingRequestIterator i = mPendingRequestsList.begin();
6163 i != mPendingRequestsList.end(); i++) {
6164 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6165 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6166 i->input_buffer);
6167 }
6168 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6169 mPendingBuffersMap.get_num_overall_buffers());
6170 dprintf(fd, "-------+------------------\n");
6171 dprintf(fd, " Frame | Stream type mask \n");
6172 dprintf(fd, "-------+------------------\n");
6173 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6174 for(auto &j : req.mPendingBufferList) {
6175 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6176 dprintf(fd, " %5d | %11d \n",
6177 req.frame_number, channel->getStreamTypeMask());
6178 }
6179 }
6180 dprintf(fd, "-------+------------------\n");
6181
6182 dprintf(fd, "\nPending frame drop list: %zu\n",
6183 mPendingFrameDropList.size());
6184 dprintf(fd, "-------+-----------\n");
6185 dprintf(fd, " Frame | Stream ID \n");
6186 dprintf(fd, "-------+-----------\n");
6187 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6188 i != mPendingFrameDropList.end(); i++) {
6189 dprintf(fd, " %5d | %9d \n",
6190 i->frame_number, i->stream_ID);
6191 }
6192 dprintf(fd, "-------+-----------\n");
6193
6194 dprintf(fd, "\n Camera HAL3 information End \n");
6195
6196 /* use dumpsys media.camera as trigger to send update debug level event */
6197 mUpdateDebugLevel = true;
6198 pthread_mutex_unlock(&mMutex);
6199 return;
6200}
6201
6202/*===========================================================================
6203 * FUNCTION : flush
6204 *
6205 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6206 * conditionally restarts channels
6207 *
6208 * PARAMETERS :
6209 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006210 * @ stopChannelImmediately: stop the channel immediately. This should be used
6211 * when the device has encountered an error and MIPI
6212 * may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006213 *
6214 * RETURN :
6215 * 0 on success
6216 * Error code on failure
6217 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006218int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006219{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006220 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 int32_t rc = NO_ERROR;
6222
6223 LOGD("Unblocking Process Capture Request");
6224 pthread_mutex_lock(&mMutex);
6225 mFlush = true;
6226 pthread_mutex_unlock(&mMutex);
6227
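    // Stop all channels first so no new buffers are produced while pending
    // requests are errored out and returned to the framework.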
6228 rc = stopAllChannels();
6229 // unlink of dualcam
6230 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006231 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6232 &m_pDualCamCmdPtr->bundle_info;
6233 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006234 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6235 pthread_mutex_lock(&gCamLock);
6236
6237 if (mIsMainCamera == 1) {
6238 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6239 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006240 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 // related session id should be session id of linked session
6242 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6243 } else {
6244 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6245 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006246 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6248 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006249 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006250 pthread_mutex_unlock(&gCamLock);
6251
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006252 rc = mCameraHandle->ops->set_dual_cam_cmd(
6253 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006254 if (rc < 0) {
6255 LOGE("Dualcam: Unlink failed, but still proceed to close");
6256 }
6257 }
6258
6259 if (rc < 0) {
6260 LOGE("stopAllChannels failed");
6261 return rc;
6262 }
6263 if (mChannelHandle) {
6264 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006265 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006266 }
6267
6268 // Reset bundle info
6269 rc = setBundleInfo();
6270 if (rc < 0) {
6271 LOGE("setBundleInfo failed %d", rc);
6272 return rc;
6273 }
6274
6275 // Mutex Lock
6276 pthread_mutex_lock(&mMutex);
6277
6278 // Unblock process_capture_request
6279 mPendingLiveRequest = 0;
6280 pthread_cond_signal(&mRequestCond);
6281
6282 rc = notifyErrorForPendingRequests();
6283 if (rc < 0) {
6284 LOGE("notifyErrorForPendingRequests failed");
6285 pthread_mutex_unlock(&mMutex);
6286 return rc;
6287 }
6288
6289 mFlush = false;
6290
6291 // Start the Streams/Channels
6292 if (restartChannels) {
6293 rc = startAllChannels();
6294 if (rc < 0) {
6295 LOGE("startAllChannels failed");
6296 pthread_mutex_unlock(&mMutex);
6297 return rc;
6298 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006299 if (mChannelHandle) {
6300 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006301 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006302 if (rc < 0) {
6303 LOGE("start_channel failed");
6304 pthread_mutex_unlock(&mMutex);
6305 return rc;
6306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006307 }
6308 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006309 pthread_mutex_unlock(&mMutex);
6310
6311 return 0;
6312}
6313
6314/*===========================================================================
6315 * FUNCTION : flushPerf
6316 *
6317 * DESCRIPTION: This is the performance-optimized version of flush that does
6318 * not use stream off; rather, it flushes the system
6319 *
6320 * PARAMETERS :
6321 *
6322 *
6323 * RETURN : 0 : success
6324 * -EINVAL: input is malformed (device is not valid)
6325 * -ENODEV: if the device has encountered a serious error
6326 *==========================================================================*/
6327int QCamera3HardwareInterface::flushPerf()
6328{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006329 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 int32_t rc = 0;
6331 struct timespec timeout;
6332 bool timed_wait = false;
6333
6334 pthread_mutex_lock(&mMutex);
6335 mFlushPerf = true;
6336 mPendingBuffersMap.numPendingBufsAtFlush =
6337 mPendingBuffersMap.get_num_overall_buffers();
6338 LOGD("Calling flush. Wait for %d buffers to return",
6339 mPendingBuffersMap.numPendingBufsAtFlush);
6340
6341 /* send the flush event to the backend */
6342 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6343 if (rc < 0) {
6344 LOGE("Error in flush: IOCTL failure");
6345 mFlushPerf = false;
6346 pthread_mutex_unlock(&mMutex);
6347 return -ENODEV;
6348 }
6349
6350 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6351 LOGD("No pending buffers in HAL, return flush");
6352 mFlushPerf = false;
6353 pthread_mutex_unlock(&mMutex);
6354 return rc;
6355 }
6356
6357 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006358 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006359 if (rc < 0) {
6360 LOGE("Error reading the real time clock, cannot use timed wait");
6361 } else {
6362 timeout.tv_sec += FLUSH_TIMEOUT;
6363 timed_wait = true;
6364 }
6365
6366 //Block on conditional variable
6367 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6368 LOGD("Waiting on mBuffersCond");
6369 if (!timed_wait) {
6370 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6371 if (rc != 0) {
6372 LOGE("pthread_cond_wait failed due to rc = %s",
6373 strerror(rc));
6374 break;
6375 }
6376 } else {
6377 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6378 if (rc != 0) {
6379 LOGE("pthread_cond_timedwait failed due to rc = %s",
6380 strerror(rc));
6381 break;
6382 }
6383 }
6384 }
6385 if (rc != 0) {
6386 mFlushPerf = false;
6387 pthread_mutex_unlock(&mMutex);
6388 return -ENODEV;
6389 }
6390
6391 LOGD("Received buffers, now safe to return them");
6392
6393 //make sure the channels handle flush
6394 //currently only required for the picture channel to release snapshot resources
6395 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6396 it != mStreamInfo.end(); it++) {
6397 QCamera3Channel *channel = (*it)->channel;
6398 if (channel) {
6399 rc = channel->flush();
6400 if (rc) {
6401 LOGE("Flushing the channels failed with error %d", rc);
6402 // Even though the channel flush failed, we need to continue and
6403 // return the buffers we have to the framework; however, the
6404 // return value will be an error
6405 rc = -ENODEV;
6406 }
6407 }
6408 }
6409
6410 /* notify the frameworks and send errored results */
6411 rc = notifyErrorForPendingRequests();
6412 if (rc < 0) {
6413 LOGE("notifyErrorForPendingRequests failed");
6414 pthread_mutex_unlock(&mMutex);
6415 return rc;
6416 }
6417
6418 //unblock process_capture_request
6419 mPendingLiveRequest = 0;
6420 unblockRequestIfNecessary();
6421
6422 mFlushPerf = false;
6423 pthread_mutex_unlock(&mMutex);
6424 LOGD ("Flush Operation complete. rc = %d", rc);
6425 return rc;
6426}
6427
6428/*===========================================================================
6429 * FUNCTION : handleCameraDeviceError
6430 *
6431 * DESCRIPTION: This function calls internal flush and notifies the error to
6432 * framework and updates the state variable.
6433 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006434 * PARAMETERS :
6435 * @stopChannelImmediately : stop channels immediately without waiting for
6436 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006437 *
6438 * RETURN : NO_ERROR on Success
6439 * Error code on failure
6440 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006441int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006442{
6443 int32_t rc = NO_ERROR;
6444
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006445 {
6446 Mutex::Autolock lock(mFlushLock);
6447 pthread_mutex_lock(&mMutex);
6448 if (mState != ERROR) {
6449 //if mState != ERROR, nothing to be done
6450 pthread_mutex_unlock(&mMutex);
6451 return NO_ERROR;
6452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006453 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006454
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006455 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006456 if (NO_ERROR != rc) {
6457 LOGE("internal flush to handle mState = ERROR failed");
6458 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006459
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006460 pthread_mutex_lock(&mMutex);
6461 mState = DEINIT;
6462 pthread_mutex_unlock(&mMutex);
6463 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006464
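    // Notify the framework of a device-level error. Per the camera3 HAL contract,
    // CAMERA3_MSG_ERROR_DEVICE with a zero frame number and no error_stream
    // indicates the device is no longer usable and the framework is expected to
    // close it.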
6465 camera3_notify_msg_t notify_msg;
6466 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6467 notify_msg.type = CAMERA3_MSG_ERROR;
6468 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6469 notify_msg.message.error.error_stream = NULL;
6470 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006471 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006472
6473 return rc;
6474}
6475
6476/*===========================================================================
6477 * FUNCTION : captureResultCb
6478 *
6479 * DESCRIPTION: Callback handler for all capture result
6480 * (streams, as well as metadata)
6481 *
6482 * PARAMETERS :
6483 * @metadata : metadata information
6484 * @buffer : actual gralloc buffer to be returned to frameworks.
6485 * NULL if metadata.
6486 *
6487 * RETURN : NONE
6488 *==========================================================================*/
6489void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6490 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6491{
6492 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006493 pthread_mutex_lock(&mMutex);
6494 uint8_t batchSize = mBatchSize;
6495 pthread_mutex_unlock(&mMutex);
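        // Snapshot mBatchSize into a local while holding mMutex so the batching
        // decision below stays consistent even if the batch configuration changes
        // concurrently.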
6496 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006497 handleBatchMetadata(metadata_buf,
6498 true /* free_and_bufdone_meta_buf */);
6499 } else { /* mBatchSize = 0 */
6500 hdrPlusPerfLock(metadata_buf);
6501 pthread_mutex_lock(&mMutex);
6502 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006503 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006504 true /* last urgent frame of batch metadata */,
6505 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006506 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006507 pthread_mutex_unlock(&mMutex);
6508 }
6509 } else if (isInputBuffer) {
6510 pthread_mutex_lock(&mMutex);
6511 handleInputBufferWithLock(frame_number);
6512 pthread_mutex_unlock(&mMutex);
6513 } else {
6514 pthread_mutex_lock(&mMutex);
6515 handleBufferWithLock(buffer, frame_number);
6516 pthread_mutex_unlock(&mMutex);
6517 }
6518 return;
6519}
6520
6521/*===========================================================================
6522 * FUNCTION : getReprocessibleOutputStreamId
6523 *
6524 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
6525 * i.e. the output or bidirectional stream whose size and format
6526 * match the configured input stream, if one exists.
6527 *
6528 * PARAMETERS :
6529 * @id : return the stream id if found
6530 *
6531 * RETURN : int32_t type of status
6532 * NO_ERROR -- success
6533 * non-zero failure code
6534 *==========================================================================*/
6535int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6536{
6537 /* Check whether there is any output or bidirectional stream with the same size
6538 and format as the input stream, and return that stream */
6539 if ((mInputStreamInfo.dim.width > 0) &&
6540 (mInputStreamInfo.dim.height > 0)) {
6541 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6542 it != mStreamInfo.end(); it++) {
6543
6544 camera3_stream_t *stream = (*it)->stream;
6545 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6546 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6547 (stream->format == mInputStreamInfo.format)) {
6548 // Usage flag for an input stream and the source output stream
6549 // may be different.
6550 LOGD("Found reprocessible output stream! %p", *it);
6551 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6552 stream->usage, mInputStreamInfo.usage);
6553
6554 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6555 if (channel != NULL && channel->mStreams[0]) {
6556 id = channel->mStreams[0]->getMyServerID();
6557 return NO_ERROR;
6558 }
6559 }
6560 }
6561 } else {
6562 LOGD("No input stream, so no reprocessible output stream");
6563 }
6564 return NAME_NOT_FOUND;
6565}
6566
6567/*===========================================================================
6568 * FUNCTION : lookupFwkName
6569 *
6570 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6571 * make sure the parameter is correctly propagated
6572 *
6573 * PARAMETERS :
6574 * @arr : map between the two enums
6575 * @len : len of the map
6576 * @hal_name : name of the HAL parameter to map
6577 *
6578 * RETURN : int type of status
6579 * fwk_name -- success
6580 * NAME_NOT_FOUND -- failure
6581 *==========================================================================*/
6582template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6583 size_t len, halType hal_name)
6584{
6585
6586 for (size_t i = 0; i < len; i++) {
6587 if (arr[i].hal_name == hal_name) {
6588 return arr[i].fwk_name;
6589 }
6590 }
6591
6592 /* Not being able to find a matching framework type is not necessarily
6593 * an error case. This happens when mm-camera supports more attributes
6594 * than the framework does */
6595 LOGH("Cannot find matching framework type");
6596 return NAME_NOT_FOUND;
6597}
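// Usage sketch (illustrative): translating a HAL enum into its framework
// counterpart with one of the mapping tables used throughout this file,
// e.g. SCENE_MODES_MAP:
//
//     int val = lookupFwkName(SCENE_MODES_MAP,
//             METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
//     if (NAME_NOT_FOUND != val) {
//         uint8_t fwkSceneMode = (uint8_t)val;
//         camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
//     }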
6598
6599/*===========================================================================
6600 * FUNCTION : lookupHalName
6601 *
6602 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6603 * make sure the parameter is correctly propagated
6604 *
6605 * PARAMETERS :
6606 * @arr : map between the two enums
6607 * @len : len of the map
6608 * @fwk_name : name of the framework parameter to map
6609 *
6610 * RETURN : int32_t type of status
6611 * hal_name -- success
6612 * NAME_NOT_FOUND -- failure
6613 *==========================================================================*/
6614template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6615 size_t len, fwkType fwk_name)
6616{
6617 for (size_t i = 0; i < len; i++) {
6618 if (arr[i].fwk_name == fwk_name) {
6619 return arr[i].hal_name;
6620 }
6621 }
6622
6623 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6624 return NAME_NOT_FOUND;
6625}
6626
6627/*===========================================================================
6628 * FUNCTION : lookupProp
6629 *
6630 * DESCRIPTION: lookup a value by its name
6631 *
6632 * PARAMETERS :
6633 * @arr : map between the two enums
6634 * @len : size of the map
6635 * @name : name to be looked up
6636 *
6637 * RETURN : Value if found
6638 * CAM_CDS_MODE_MAX if not found
6639 *==========================================================================*/
6640template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6641 size_t len, const char *name)
6642{
6643 if (name) {
6644 for (size_t i = 0; i < len; i++) {
6645 if (!strcmp(arr[i].desc, name)) {
6646 return arr[i].val;
6647 }
6648 }
6649 }
6650 return CAM_CDS_MODE_MAX;
6651}
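// Usage sketch (illustrative; the property name and CDS_MAP table are assumptions,
// not guaranteed by this file): mapping a CDS setting read from a system property
// onto the backend enum, with a fallback when the string is not recognized:
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");        // property name assumed
//     cam_cds_mode_type_t cds =
//             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop); // CDS_MAP assumed
//     if (CAM_CDS_MODE_MAX == cds) {
//         cds = CAM_CDS_MODE_AUTO;                              // enum value assumed
//     }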
6652
6653/*===========================================================================
6654 * FUNCTION   : translateFromHalMetadata
 *
6655 * DESCRIPTION: Translate metadata from the HAL/backend into the
 *               camera_metadata_t format expected by the camera framework
6656 *
6657 * PARAMETERS :
6658 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006659 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006660 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006661 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6662 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006663 *
6664 * RETURN : camera_metadata_t*
6665 * metadata in a format specified by fwk
6666 *==========================================================================*/
6667camera_metadata_t*
6668QCamera3HardwareInterface::translateFromHalMetadata(
6669 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006670 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006671 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006672 bool lastMetadataInBatch,
6673 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006674{
6675 CameraMetadata camMetadata;
6676 camera_metadata_t *resultMetadata;
6677
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006678 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006679 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6680 * Timestamp is needed because it's used for shutter notify calculation.
6681 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006682 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006683 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006684 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006685 }
6686
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006687 if (pendingRequest.jpegMetadata.entryCount())
6688 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006689
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006690 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6691 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6692 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6693 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6694 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006695 if (mBatchSize == 0) {
6696 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006697 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006699
Samuel Ha68ba5172016-12-15 18:41:12 -08006700 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6701 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006702 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006703 // DevCamDebug metadata translateFromHalMetadata AF
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6705 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6706 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6707 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6708 }
6709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6710 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6711 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6712 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6715 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6716 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6717 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6720 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6721 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6722 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6725 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6726 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6727 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6730 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6731 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6732 *DevCamDebug_af_monitor_pdaf_target_pos;
6733 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6734 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6737 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6738 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6739 *DevCamDebug_af_monitor_pdaf_confidence;
6740 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6741 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6744 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6745 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6746 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6747 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6750 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6752 *DevCamDebug_af_monitor_tof_target_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6754 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6757 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6758 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6759 *DevCamDebug_af_monitor_tof_confidence;
6760 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6761 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6762 }
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6764 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6765 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6766 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6767 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6768 }
6769 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6770 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6771 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6772 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6773 &fwk_DevCamDebug_af_monitor_type_select, 1);
6774 }
6775 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6776 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6777 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6778 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6779 &fwk_DevCamDebug_af_monitor_refocus, 1);
6780 }
6781 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6782 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6783 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6784 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6785 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6786 }
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6788 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6789 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6790 *DevCamDebug_af_search_pdaf_target_pos;
6791 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6792 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6795 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6796 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6797 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6798 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6801 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6802 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6803 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6804 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6805 }
6806 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6807 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6808 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6809 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6810 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6813 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6814 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6815 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6816 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6817 }
6818 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6819 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6820 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6821 *DevCamDebug_af_search_tof_target_pos;
6822 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6823 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6826 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6827 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6828 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6829 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6832 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6833 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6834 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6835 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6838 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6840 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6841 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6844 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6845 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6846 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6847 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6848 }
6849 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6850 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6851 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6852 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6853 &fwk_DevCamDebug_af_search_type_select, 1);
6854 }
6855 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6856 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6857 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6858 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6859 &fwk_DevCamDebug_af_search_next_pos, 1);
6860 }
6861 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6862 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6863 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6864 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6865 &fwk_DevCamDebug_af_search_target_pos, 1);
6866 }
6867 // DevCamDebug metadata translateFromHalMetadata AEC
6868 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6869 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6870 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6871 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6874 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6875 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6876 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6879 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6880 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6881 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6882 }
6883 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6884 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6885 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6886 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6889 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6890 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6891 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6892 }
6893 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6894 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6895 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6896 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6897 }
6898 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6899 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6900 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6901 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6904 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6905 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6906 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6907 }
Samuel Ha34229982017-02-17 13:51:11 -08006908 // DevCamDebug metadata translateFromHalMetadata zzHDR
6909 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6910 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6911 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6912 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6913 }
6914 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6915 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006916 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006917 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6918 }
6919 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6920 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6921 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6922 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6923 }
6924 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6925 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006926 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006927 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6928 }
6929 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6930 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6931 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6932 *DevCamDebug_aec_hdr_sensitivity_ratio;
6933 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6934 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6935 }
6936 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6937 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6938 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6939 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6940 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6941 }
6942 // DevCamDebug metadata translateFromHalMetadata ADRC
6943 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6944 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6945 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6946 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6947 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6948 }
6949 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6950 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6951 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6952 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6953 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6954 }
6955 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6956 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6957 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6958 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6959 }
6960 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6961 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6962 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6963 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6964 }
6965 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6966 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6967 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6968 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6969 }
6970 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6971 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6972 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6973 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6974 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006975 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6976 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6977 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6978 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6979 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6980 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6981 }
6982 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6983 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6984 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6985 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6986 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6987 }
6988 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6989 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6990 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6991 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6992 &fwk_DevCamDebug_aec_subject_motion, 1);
6993 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006994 // DevCamDebug metadata translateFromHalMetadata AWB
6995 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6996 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6997 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6998 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6999 }
7000 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7001 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7002 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7003 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7004 }
7005 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7006 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7007 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7008 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7009 }
7010 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7011 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7012 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7013 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7014 }
7015 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7016 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7017 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7018 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7019 }
7020 }
7021 // atrace_end(ATRACE_TAG_ALWAYS);
7022
Thierry Strudel3d639192016-09-09 11:52:26 -07007023 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7024 int64_t fwk_frame_number = *frame_number;
7025 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7026 }
7027
7028 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7029 int32_t fps_range[2];
7030 fps_range[0] = (int32_t)float_range->min_fps;
7031 fps_range[1] = (int32_t)float_range->max_fps;
7032 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7033 fps_range, 2);
7034 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7035 fps_range[0], fps_range[1]);
7036 }
7037
7038 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7039 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7040 }
7041
7042 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7043 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7044 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7045 *sceneMode);
7046 if (NAME_NOT_FOUND != val) {
7047 uint8_t fwkSceneMode = (uint8_t)val;
7048 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7049 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7050 fwkSceneMode);
7051 }
7052 }
7053
7054 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7055 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7056 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7057 }
7058
7059 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7060 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7061 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7062 }
7063
7064 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7065 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7066 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7067 }
7068
7069 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7070 CAM_INTF_META_EDGE_MODE, metadata) {
7071 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7072 }
7073
7074 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7075 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7076 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7077 }
7078
7079 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7080 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7081 }
7082
7083 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7084 if (0 <= *flashState) {
7085 uint8_t fwk_flashState = (uint8_t) *flashState;
7086 if (!gCamCapability[mCameraId]->flash_available) {
7087 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7088 }
7089 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7090 }
7091 }
7092
7093 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7094 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7095 if (NAME_NOT_FOUND != val) {
7096 uint8_t fwk_flashMode = (uint8_t)val;
7097 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7098 }
7099 }
7100
7101 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7102 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7103 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7104 }
7105
7106 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7107 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7108 }
7109
7110 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7111 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7112 }
7113
7114 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7115 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7116 }
7117
7118 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7119 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7120 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7121 }
7122
7123 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7124 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7125 LOGD("fwk_videoStab = %d", fwk_videoStab);
7126 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7127 } else {
7128 // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7129 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7130 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7131 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007133 }
7134
7135 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7136 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7137 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7138 }
7139
7140 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7141 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7142 }
7143
Thierry Strudel3d639192016-09-09 11:52:26 -07007144 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7145 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007146 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007147
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007148 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7149 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007150
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007151 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007152 blackLevelAppliedPattern->cam_black_level[0],
7153 blackLevelAppliedPattern->cam_black_level[1],
7154 blackLevelAppliedPattern->cam_black_level[2],
7155 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007156 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7157 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007158
7159#ifndef USE_HAL_3_3
7160 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307161 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007162 // depth space (i.e. divide by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307163 fwk_blackLevelInd[0] /= 16.0;
7164 fwk_blackLevelInd[1] /= 16.0;
7165 fwk_blackLevelInd[2] /= 16.0;
7166 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007167 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7168 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007169#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007170 }
7171
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007172#ifndef USE_HAL_3_3
7173 // Fixed whitelevel is used by ISP/Sensor
7174 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7175 &gCamCapability[mCameraId]->white_level, 1);
7176#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007177
7178 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7179 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7180 int32_t scalerCropRegion[4];
7181 scalerCropRegion[0] = hScalerCropRegion->left;
7182 scalerCropRegion[1] = hScalerCropRegion->top;
7183 scalerCropRegion[2] = hScalerCropRegion->width;
7184 scalerCropRegion[3] = hScalerCropRegion->height;
7185
7186 // Adjust crop region from sensor output coordinate system to active
7187 // array coordinate system.
7188 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7189 scalerCropRegion[2], scalerCropRegion[3]);
7190
7191 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7192 }
7193
7194 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7195 LOGD("sensorExpTime = %lld", *sensorExpTime);
7196 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7197 }
7198
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007199 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7200 LOGD("expTimeBoost = %f", *expTimeBoost);
7201 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7202 }
7203
Thierry Strudel3d639192016-09-09 11:52:26 -07007204 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7205 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7206 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7207 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7208 }
7209
7210 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7211 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7212 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7213 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7214 sensorRollingShutterSkew, 1);
7215 }
7216
7217 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7218 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7219 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7220
7221 //calculate the noise profile based on sensitivity
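        // ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O) pair per color
        // channel for the sensor's linear noise model, where the noise variance at
        // pixel value p is approximately S * p + O.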
7222 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7223 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7224 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7225 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7226 noise_profile[i] = noise_profile_S;
7227 noise_profile[i+1] = noise_profile_O;
7228 }
7229 LOGD("noise model entry (S, O) is (%f, %f)",
7230 noise_profile_S, noise_profile_O);
7231 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7232 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7233 }
7234
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007235#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007236 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007237 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007238 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007239 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007240 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7241 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7242 }
7243 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244#endif
7245
Thierry Strudel3d639192016-09-09 11:52:26 -07007246 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7247 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7248 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7249 }
7250
7251 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7252 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7253 *faceDetectMode);
7254 if (NAME_NOT_FOUND != val) {
7255 uint8_t fwk_faceDetectMode = (uint8_t)val;
7256 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7257
7258 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7259 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7260 CAM_INTF_META_FACE_DETECTION, metadata) {
7261 uint8_t numFaces = MIN(
7262 faceDetectionInfo->num_faces_detected, MAX_ROI);
7263 int32_t faceIds[MAX_ROI];
7264 uint8_t faceScores[MAX_ROI];
7265 int32_t faceRectangles[MAX_ROI * 4];
7266 int32_t faceLandmarks[MAX_ROI * 6];
7267 size_t j = 0, k = 0;
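                // Packing layout: faceRectangles carries 4 ints per face
                // (left, top, right, bottom), advanced by j; faceLandmarks carries
                // 6 ints per face (left eye x/y, right eye x/y, mouth x/y),
                // advanced by k in steps of TOTAL_LANDMARK_INDICES.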
7268
7269 for (size_t i = 0; i < numFaces; i++) {
7270 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7271 // Adjust crop region from sensor output coordinate system to active
7272 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007273 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7275 rect.width, rect.height);
7276
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007277 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007278
Jason Lee8ce36fa2017-04-19 19:40:37 -07007279 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7280 "bottom-right (%d, %d)",
7281 faceDetectionInfo->frame_id, i,
7282 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7283 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7284
Thierry Strudel3d639192016-09-09 11:52:26 -07007285 j+= 4;
7286 }
7287 if (numFaces <= 0) {
7288 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7289 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7290 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7291 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7292 }
7293
7294 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7295 numFaces);
7296 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7297 faceRectangles, numFaces * 4U);
7298 if (fwk_faceDetectMode ==
7299 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7300 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7301 CAM_INTF_META_FACE_LANDMARK, metadata) {
7302
7303 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007304 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007305 // Map the co-ordinate sensor output coordinate system to active
7306 // array coordinate system.
7307 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007308 face_landmarks.left_eye_center.x,
7309 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007310 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007311 face_landmarks.right_eye_center.x,
7312 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007313 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007314 face_landmarks.mouth_center.x,
7315 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007316
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007317 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007318
7319 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7320 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7321 faceDetectionInfo->frame_id, i,
7322 faceLandmarks[k + LEFT_EYE_X],
7323 faceLandmarks[k + LEFT_EYE_Y],
7324 faceLandmarks[k + RIGHT_EYE_X],
7325 faceLandmarks[k + RIGHT_EYE_Y],
7326 faceLandmarks[k + MOUTH_X],
7327 faceLandmarks[k + MOUTH_Y]);
7328
Thierry Strudel04e026f2016-10-10 11:27:36 -07007329 k+= TOTAL_LANDMARK_INDICES;
7330 }
7331 } else {
7332 for (size_t i = 0; i < numFaces; i++) {
7333 setInvalidLandmarks(faceLandmarks+k);
7334 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007335 }
7336 }
7337
Jason Lee49619db2017-04-13 12:07:22 -07007338 for (size_t i = 0; i < numFaces; i++) {
7339 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7340
7341 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7342 faceDetectionInfo->frame_id, i, faceIds[i]);
7343 }
7344
Thierry Strudel3d639192016-09-09 11:52:26 -07007345 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7346 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7347 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007348 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007349 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7350 CAM_INTF_META_FACE_BLINK, metadata) {
7351 uint8_t detected[MAX_ROI];
7352 uint8_t degree[MAX_ROI * 2];
7353 for (size_t i = 0; i < numFaces; i++) {
7354 detected[i] = blinks->blink[i].blink_detected;
7355 degree[2 * i] = blinks->blink[i].left_blink;
7356 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007357
Jason Lee49619db2017-04-13 12:07:22 -07007358 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7359 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7360 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7361 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007362 }
7363 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7364 detected, numFaces);
7365 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7366 degree, numFaces * 2);
7367 }
7368 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7369 CAM_INTF_META_FACE_SMILE, metadata) {
7370 uint8_t degree[MAX_ROI];
7371 uint8_t confidence[MAX_ROI];
7372 for (size_t i = 0; i < numFaces; i++) {
7373 degree[i] = smiles->smile[i].smile_degree;
7374 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007375
Jason Lee49619db2017-04-13 12:07:22 -07007376 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7377 "smile_degree=%d, smile_score=%d",
7378 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007379 }
7380 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7381 degree, numFaces);
7382 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7383 confidence, numFaces);
7384 }
7385 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7386 CAM_INTF_META_FACE_GAZE, metadata) {
7387 int8_t angle[MAX_ROI];
7388 int32_t direction[MAX_ROI * 3];
7389 int8_t degree[MAX_ROI * 2];
7390 for (size_t i = 0; i < numFaces; i++) {
7391 angle[i] = gazes->gaze[i].gaze_angle;
7392 direction[3 * i] = gazes->gaze[i].updown_dir;
7393 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7394 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7395 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7396 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007397
7398 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7399 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7400 "left_right_gaze=%d, top_bottom_gaze=%d",
7401 faceDetectionInfo->frame_id, i, angle[i],
7402 direction[3 * i], direction[3 * i + 1],
7403 direction[3 * i + 2],
7404 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007405 }
7406 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7407 (uint8_t *)angle, numFaces);
7408 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7409 direction, numFaces * 3);
7410 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7411 (uint8_t *)degree, numFaces * 2);
7412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007413 }
7414 }
7415 }
7416 }
7417
7418 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7419 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007420 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007421 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007422 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007423
Shuzhen Wang14415f52016-11-16 18:26:18 -08007424 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7425 histogramBins = *histBins;
7426 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7427 }
7428
7429 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007430 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7431 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007432 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007433
7434 switch (stats_data->type) {
7435 case CAM_HISTOGRAM_TYPE_BAYER:
7436 switch (stats_data->bayer_stats.data_type) {
7437 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007438 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7439 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007440 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007441 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7442 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007443 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007444 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7445 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007446 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007447 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007448 case CAM_STATS_CHANNEL_R:
7449 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007450 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7451 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007452 }
7453 break;
7454 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007455 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007456 break;
7457 }
7458
Shuzhen Wang14415f52016-11-16 18:26:18 -08007459 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007460 }
7461 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007462 }
7463
7464 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7465 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7466 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7467 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7468 }
7469
7470 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7471 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7472 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7473 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7474 }
7475
7476 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7477 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7478 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7479 CAM_MAX_SHADING_MAP_HEIGHT);
7480 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7481 CAM_MAX_SHADING_MAP_WIDTH);
7482 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7483 lensShadingMap->lens_shading, 4U * map_width * map_height);
7484 }
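    // The lens shading map carries four gain samples per grid cell (one per Bayer
    // channel), hence the 4U * map_width * map_height element count; the map
    // dimensions are clamped above to the HAL's maximum supported map size.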
7485
7486 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7487 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7488 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7489 }
7490
7491 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7492 //Populate CAM_INTF_META_TONEMAP_CURVES
7493 /* ch0 = G, ch 1 = B, ch 2 = R*/
7494 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7495 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7496 tonemap->tonemap_points_cnt,
7497 CAM_MAX_TONEMAP_CURVE_SIZE);
7498 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7499 }
7500
7501 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7502 &tonemap->curves[0].tonemap_points[0][0],
7503 tonemap->tonemap_points_cnt * 2);
7504
7505 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7506 &tonemap->curves[1].tonemap_points[0][0],
7507 tonemap->tonemap_points_cnt * 2);
7508
7509 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7510 &tonemap->curves[2].tonemap_points[0][0],
7511 tonemap->tonemap_points_cnt * 2);
7512 }
7513
7514 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7515 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7516 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7517 CC_GAIN_MAX);
7518 }
7519
7520 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7521 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7522 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7523 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7524 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7525 }
7526
7527 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7528 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7529 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7530 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7531 toneCurve->tonemap_points_cnt,
7532 CAM_MAX_TONEMAP_CURVE_SIZE);
7533 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7534 }
7535 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7536 (float*)toneCurve->curve.tonemap_points,
7537 toneCurve->tonemap_points_cnt * 2);
7538 }
7539
7540 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7541 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7542 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7543 predColorCorrectionGains->gains, 4);
7544 }
7545
7546 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7547 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7548 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7549 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7550 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7551 }
7552
7553 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7554 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7555 }
7556
7557 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7558 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7559 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7560 }
7561
7562 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7563 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7564 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7565 }
7566
7567 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7568 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7569 *effectMode);
7570 if (NAME_NOT_FOUND != val) {
7571 uint8_t fwk_effectMode = (uint8_t)val;
7572 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7573 }
7574 }
7575
7576 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7577 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7578 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7579 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7580 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7581 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7582 }
7583 int32_t fwk_testPatternData[4];
7584 fwk_testPatternData[0] = testPatternData->r;
7585 fwk_testPatternData[3] = testPatternData->b;
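        // The framework expects the test pattern data in a fixed channel order
        // (presumably [R, Geven, Godd, B] for ANDROID_SENSOR_TEST_PATTERN_DATA),
        // while the backend reports Gr/Gb relative to the CFA layout, so indices
        // 1 and 2 are mapped below according to the sensor's color arrangement.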
7586 switch (gCamCapability[mCameraId]->color_arrangement) {
7587 case CAM_FILTER_ARRANGEMENT_RGGB:
7588 case CAM_FILTER_ARRANGEMENT_GRBG:
7589 fwk_testPatternData[1] = testPatternData->gr;
7590 fwk_testPatternData[2] = testPatternData->gb;
7591 break;
7592 case CAM_FILTER_ARRANGEMENT_GBRG:
7593 case CAM_FILTER_ARRANGEMENT_BGGR:
7594 fwk_testPatternData[2] = testPatternData->gr;
7595 fwk_testPatternData[1] = testPatternData->gb;
7596 break;
7597 default:
7598 LOGE("color arrangement %d is not supported",
7599 gCamCapability[mCameraId]->color_arrangement);
7600 break;
7601 }
7602 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7603 }
7604
7605 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7606 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7607 }
7608
7609 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7610 String8 str((const char *)gps_methods);
7611 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7612 }
7613
7614 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7615 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7616 }
7617
7618 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7619 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7620 }
7621
7622 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7623 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7624 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7625 }
7626
7627 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7628 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7629 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7630 }
7631
7632 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7633 int32_t fwk_thumb_size[2];
7634 fwk_thumb_size[0] = thumb_size->width;
7635 fwk_thumb_size[1] = thumb_size->height;
7636 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7637 }
7638
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007639 // Skip reprocess metadata if there is no input stream.
7640 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7641 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7642 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7643 privateData,
7644 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007646 }
7647
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007648 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007649 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007650 meteringMode, 1);
7651 }
7652
Thierry Strudel54dc9782017-02-15 12:12:10 -08007653 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7654 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7655 LOGD("hdr_scene_data: %d %f\n",
7656 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7657 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7658 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7659 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7660 &isHdr, 1);
7661 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7662 &isHdrConfidence, 1);
7663 }
7664
7665
7666
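    // Pack the tuning metadata into a flat blob for the vendor tag: a fixed header
    // of six uint32_t words (data version plus the sensor, VFE, CPP, CAC and mod3
    // section sizes), followed by the variable-length sections themselves.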
Thierry Strudel3d639192016-09-09 11:52:26 -07007667 if (metadata->is_tuning_params_valid) {
7668 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7669 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7670 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7671
7672
7673 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7674 sizeof(uint32_t));
7675 data += sizeof(uint32_t);
7676
7677 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7678 sizeof(uint32_t));
7679 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7680 data += sizeof(uint32_t);
7681
7682 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7683 sizeof(uint32_t));
7684 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7685 data += sizeof(uint32_t);
7686
7687 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7688 sizeof(uint32_t));
7689 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7690 data += sizeof(uint32_t);
7691
7692 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7693 sizeof(uint32_t));
7694 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7695 data += sizeof(uint32_t);
7696
7697 metadata->tuning_params.tuning_mod3_data_size = 0;
7698 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7699 sizeof(uint32_t));
7700 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7701 data += sizeof(uint32_t);
7702
7703 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7704 TUNING_SENSOR_DATA_MAX);
7705 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7706 count);
7707 data += count;
7708
7709 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7710 TUNING_VFE_DATA_MAX);
7711 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7712 count);
7713 data += count;
7714
7715 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7716 TUNING_CPP_DATA_MAX);
7717 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7718 count);
7719 data += count;
7720
7721 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7722 TUNING_CAC_DATA_MAX);
7723 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7724 count);
7725 data += count;
7726
7727 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7728 (int32_t *)(void *)tuning_meta_data_blob,
7729 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7730 }
7731
7732 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7733 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7734 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7735 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7736 NEUTRAL_COL_POINTS);
7737 }
7738
7739 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7740 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7741 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7742 }
7743
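    // AE regions are reported to the framework as a single
    // [xmin, ymin, xmax, ymax, weight] tuple (see convertToRegions()).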
7744 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7745 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7746         // Adjust AE region from sensor output coordinate system to active
7747         // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007748 cam_rect_t hAeRect = hAeRegions->rect;
7749 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7750 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007751
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007752 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7754 REGIONS_TUPLE_COUNT);
7755 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7756 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007757 hAeRect.left, hAeRect.top, hAeRect.width,
7758 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007759 }
7760
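    // Publish AF state with the final result only if it was not already sent as an
    // early partial result; prefer the focus state cached by the urgent-metadata
    // path when it is valid, otherwise fall back to the state in this metadata.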
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007761 if (!pendingRequest.focusStateSent) {
7762 if (pendingRequest.focusStateValid) {
7763 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7764 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007765 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007766 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7767 uint8_t fwk_afState = (uint8_t) *afState;
7768 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7769 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7770 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007771 }
7772 }
7773
Thierry Strudel3d639192016-09-09 11:52:26 -07007774 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7775 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7776 }
7777
7778 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7779 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7780 }
7781
7782 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7783 uint8_t fwk_lensState = *lensState;
7784 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7785 }
7786
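    // The framework exposes only a single AUTO antibanding mode, so collapse the
    // HAL's 50 Hz / 60 Hz auto variants before mapping to the framework enum.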
Thierry Strudel3d639192016-09-09 11:52:26 -07007787 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007788 uint32_t ab_mode = *hal_ab_mode;
7789 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7790 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7791 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007793 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007794 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007795 if (NAME_NOT_FOUND != val) {
7796 uint8_t fwk_ab_mode = (uint8_t)val;
7797 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7798 }
7799 }
7800
7801 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7802 int val = lookupFwkName(SCENE_MODES_MAP,
7803 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7804 if (NAME_NOT_FOUND != val) {
7805 uint8_t fwkBestshotMode = (uint8_t)val;
7806 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7807 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7808 } else {
7809 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7810 }
7811 }
7812
7813 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7814 uint8_t fwk_mode = (uint8_t) *mode;
7815 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7816 }
7817
7818     /* Constant metadata values to be updated */
7819 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7820 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7821
7822 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7823 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7824
7825 int32_t hotPixelMap[2];
7826 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7827
7828 // CDS
7829 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7830 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7831 }
7832
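    // Video HDR: map the sensor HDR state onto the vendor tag and track on/off
    // transitions in mCurrFeatureState so toggles are logged (PROFILE_META_HDR_TOGGLED).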
Thierry Strudel04e026f2016-10-10 11:27:36 -07007833 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7834 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007835 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007836 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7837 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7838 } else {
7839 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7840 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007841
7842 if(fwk_hdr != curr_hdr_state) {
7843 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7844 if(fwk_hdr)
7845 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7846 else
7847 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7848 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007849 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7850 }
7851
Thierry Strudel54dc9782017-02-15 12:12:10 -08007852 //binning correction
7853 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7854 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7855 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7856 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7857 }
7858
Thierry Strudel04e026f2016-10-10 11:27:36 -07007859 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007860 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007861 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7862 int8_t is_ir_on = 0;
7863
7864 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7865 if(is_ir_on != curr_ir_state) {
7866 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7867 if(is_ir_on)
7868 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7869 else
7870 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7871 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007872 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007873 }
7874
Thierry Strudel269c81a2016-10-12 12:13:59 -07007875 // AEC SPEED
7876 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7877 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7878 }
7879
7880 // AWB SPEED
7881 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7882 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7883 }
7884
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 // TNR
7886 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7887 uint8_t tnr_enable = tnr->denoise_enable;
7888 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007889 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7890 int8_t is_tnr_on = 0;
7891
7892 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7893 if(is_tnr_on != curr_tnr_state) {
7894 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7895 if(is_tnr_on)
7896 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7897 else
7898 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007900
7901 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7902 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7903 }
7904
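    // Reprocess crop data: publish the crop (and ROI map) of the reprocessible
    // output stream through vendor tags. If the HAL has already reprocessed
    // internally, the full input stream dimensions are reported instead.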
7905 // Reprocess crop data
7906 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7907 uint8_t cnt = crop_data->num_of_streams;
7908 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7909 // mm-qcamera-daemon only posts crop_data for streams
7910             // not linked to pproc, so the absence of valid crop metadata is
7911             // not necessarily an error case.
7912 LOGD("No valid crop metadata entries");
7913 } else {
7914 uint32_t reproc_stream_id;
7915 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7916 LOGD("No reprocessible stream found, ignore crop data");
7917 } else {
7918 int rc = NO_ERROR;
7919 Vector<int32_t> roi_map;
7920 int32_t *crop = new int32_t[cnt*4];
7921 if (NULL == crop) {
7922 rc = NO_MEMORY;
7923 }
7924 if (NO_ERROR == rc) {
7925 int32_t streams_found = 0;
7926 for (size_t i = 0; i < cnt; i++) {
7927 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7928 if (pprocDone) {
7929 // HAL already does internal reprocessing,
7930 // either via reprocessing before JPEG encoding,
7931 // or offline postprocessing for pproc bypass case.
7932 crop[0] = 0;
7933 crop[1] = 0;
7934 crop[2] = mInputStreamInfo.dim.width;
7935 crop[3] = mInputStreamInfo.dim.height;
7936 } else {
7937 crop[0] = crop_data->crop_info[i].crop.left;
7938 crop[1] = crop_data->crop_info[i].crop.top;
7939 crop[2] = crop_data->crop_info[i].crop.width;
7940 crop[3] = crop_data->crop_info[i].crop.height;
7941 }
7942 roi_map.add(crop_data->crop_info[i].roi_map.left);
7943 roi_map.add(crop_data->crop_info[i].roi_map.top);
7944 roi_map.add(crop_data->crop_info[i].roi_map.width);
7945 roi_map.add(crop_data->crop_info[i].roi_map.height);
7946 streams_found++;
7947 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7948 crop[0], crop[1], crop[2], crop[3]);
7949 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7950 crop_data->crop_info[i].roi_map.left,
7951 crop_data->crop_info[i].roi_map.top,
7952 crop_data->crop_info[i].roi_map.width,
7953 crop_data->crop_info[i].roi_map.height);
7954 break;
7955
7956 }
7957 }
7958 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7959 &streams_found, 1);
7960 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7961 crop, (size_t)(streams_found * 4));
7962 if (roi_map.array()) {
7963 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7964 roi_map.array(), roi_map.size());
7965 }
7966 }
7967 if (crop) {
7968 delete [] crop;
7969 }
7970 }
7971 }
7972 }
7973
7974 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7975         // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7976         // so hardcode the CAC result to OFF mode.
7977 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7978 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7979 } else {
7980 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7981 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7982 *cacMode);
7983 if (NAME_NOT_FOUND != val) {
7984 uint8_t resultCacMode = (uint8_t)val;
7985                 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7986                 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007987 if (pendingRequest.fwkCacMode != resultCacMode) {
7988 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007989 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007990 //Check if CAC is disabled by property
7991 if (m_cacModeDisabled) {
7992 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7993 }
7994
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007995 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007996 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7997 } else {
7998 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7999 }
8000 }
8001 }
8002
8003 // Post blob of cam_cds_data through vendor tag.
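    // Only the CDS entry belonging to the reprocessible output stream is forwarded;
    // the override blob always advertises exactly one stream.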
8004 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8005 uint8_t cnt = cdsInfo->num_of_streams;
8006 cam_cds_data_t cdsDataOverride;
8007 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8008 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8009 cdsDataOverride.num_of_streams = 1;
8010 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8011 uint32_t reproc_stream_id;
8012 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8013 LOGD("No reprocessible stream found, ignore cds data");
8014 } else {
8015 for (size_t i = 0; i < cnt; i++) {
8016 if (cdsInfo->cds_info[i].stream_id ==
8017 reproc_stream_id) {
8018 cdsDataOverride.cds_info[0].cds_enable =
8019 cdsInfo->cds_info[i].cds_enable;
8020 break;
8021 }
8022 }
8023 }
8024 } else {
8025 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8026 }
8027 camMetadata.update(QCAMERA3_CDS_INFO,
8028 (uint8_t *)&cdsDataOverride,
8029 sizeof(cam_cds_data_t));
8030 }
8031
8032 // Ldaf calibration data
8033 if (!mLdafCalibExist) {
8034 IF_META_AVAILABLE(uint32_t, ldafCalib,
8035 CAM_INTF_META_LDAF_EXIF, metadata) {
8036 mLdafCalibExist = true;
8037 mLdafCalib[0] = ldafCalib[0];
8038 mLdafCalib[1] = ldafCalib[1];
8039 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8040 ldafCalib[0], ldafCalib[1]);
8041 }
8042 }
8043
Thierry Strudel54dc9782017-02-15 12:12:10 -08008044 // EXIF debug data through vendor tag
8045 /*
8046 * Mobicat Mask can assume 3 values:
8047 * 1 refers to Mobicat data,
8048 * 2 refers to Stats Debug and Exif Debug Data
8049 * 3 refers to Mobicat and Stats Debug Data
8050 * We want to make sure that we are sending Exif debug data
8051 * only when Mobicat Mask is 2.
8052 */
8053 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8054 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8055 (uint8_t *)(void *)mExifParams.debug_params,
8056 sizeof(mm_jpeg_debug_exif_params_t));
8057 }
8058
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008059 // Reprocess and DDM debug data through vendor tag
8060 cam_reprocess_info_t repro_info;
8061 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008062 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8063 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008064 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008065 }
8066 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8067 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008068 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008069 }
8070 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8071 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008072 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008073 }
8074 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8075 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008076 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
8078 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8079 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008080 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008081 }
8082 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008083 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 }
8085 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8086 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008087 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008088 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008089 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8090 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8091 }
8092 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8093 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8094 }
8095 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8096 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008097
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008098 // INSTANT AEC MODE
8099 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8100 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8101 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8102 }
8103
Shuzhen Wange763e802016-03-31 10:24:29 -07008104 // AF scene change
8105 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8106 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8107 }
8108
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008109 // Enable ZSL
8110 if (enableZsl != nullptr) {
8111 uint8_t value = *enableZsl ?
8112 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8113 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8114 }
8115
Xu Han821ea9c2017-05-23 09:00:40 -07008116 // OIS Data
8117 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8118 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8119 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8120 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8121 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8122 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8123 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8124 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8125 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8126 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8127 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008128 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8129 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8130 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8131 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008132 }
8133
Thierry Strudel3d639192016-09-09 11:52:26 -07008134 resultMetadata = camMetadata.release();
8135 return resultMetadata;
8136}
8137
8138/*===========================================================================
8139 * FUNCTION : saveExifParams
8140 *
8141 * DESCRIPTION:
8142 * DESCRIPTION: cache 3A/EXIF debug parameters from the metadata callback into mExifParams
8143 * PARAMETERS :
8144 * @metadata : metadata information from callback
8145 *
8146 * RETURN : none
8147 *
8148 *==========================================================================*/
8149void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8150{
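    // Each block below caches one EXIF debug section (AE/AWB/AF/ASD/stats/...) into
    // mExifParams.debug_params and marks it valid; nothing is saved when
    // debug_params has not been allocated.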
8151 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8152 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8153 if (mExifParams.debug_params) {
8154 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8155 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8156 }
8157 }
8158 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8159 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8160 if (mExifParams.debug_params) {
8161 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8162 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8163 }
8164 }
8165 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8166 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8167 if (mExifParams.debug_params) {
8168 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8169 mExifParams.debug_params->af_debug_params_valid = TRUE;
8170 }
8171 }
8172 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8173 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8174 if (mExifParams.debug_params) {
8175 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8176 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8177 }
8178 }
8179 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8180 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8181 if (mExifParams.debug_params) {
8182 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8183 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8184 }
8185 }
8186 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8187 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8188 if (mExifParams.debug_params) {
8189 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8190 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8191 }
8192 }
8193 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8194 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8195 if (mExifParams.debug_params) {
8196 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8197 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8198 }
8199 }
8200 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8201 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8202 if (mExifParams.debug_params) {
8203 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8204 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8205 }
8206 }
8207}
8208
8209/*===========================================================================
8210 * FUNCTION : get3AExifParams
8211 *
8212 * DESCRIPTION: return the cached 3A EXIF parameters (mExifParams)
8213 *
8214 * PARAMETERS : none
8215 *
8216 *
8217 * RETURN : mm_jpeg_exif_params_t
8218 *
8219 *==========================================================================*/
8220mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8221{
8222 return mExifParams;
8223}
8224
8225/*===========================================================================
8226 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8227 *
8228 * DESCRIPTION: translate urgent (partial result) metadata from the backend into framework result metadata
8229 *
8230 * PARAMETERS :
8231 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008232 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8233 * urgent metadata in a batch. Always true for
8234 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008235 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008236 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8237 * i.e. even though it doesn't map to a valid partial
8238 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008239 * RETURN : camera_metadata_t*
8240 * metadata in a format specified by fwk
8241 *==========================================================================*/
8242camera_metadata_t*
8243QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008244 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008245 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008246{
8247 CameraMetadata camMetadata;
8248 camera_metadata_t *resultMetadata;
8249
Shuzhen Wang485e2442017-08-02 12:21:08 -07008250 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008251 /* In batch mode, use empty metadata if this is not the last in batch
8252 */
8253 resultMetadata = allocate_camera_metadata(0, 0);
8254 return resultMetadata;
8255 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008256
8257 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8258 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8259 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8260 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8261 }
8262
8263 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8264 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8265 &aecTrigger->trigger, 1);
8266 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8267 &aecTrigger->trigger_id, 1);
8268 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8269 aecTrigger->trigger);
8270 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8271 aecTrigger->trigger_id);
8272 }
8273
8274 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8275 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8276 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8277 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8278 }
8279
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008280 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8281 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8282 if (NAME_NOT_FOUND != val) {
8283 uint8_t fwkAfMode = (uint8_t)val;
8284 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8285 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8286 } else {
8287 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8288 val);
8289 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008290 }
8291
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008292 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8293 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8294 af_trigger->trigger);
8295 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8296 af_trigger->trigger_id);
8297
8298 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8299 mAfTrigger = *af_trigger;
8300 uint32_t fwk_AfState = (uint32_t) *afState;
8301
8302 // If this is the result for a new trigger, check if there is new early
8303 // af state. If there is, use the last af state for all results
8304 // preceding current partial frame number.
8305 for (auto & pendingRequest : mPendingRequestsList) {
8306 if (pendingRequest.frame_number < frame_number) {
8307 pendingRequest.focusStateValid = true;
8308 pendingRequest.focusState = fwk_AfState;
8309 } else if (pendingRequest.frame_number == frame_number) {
8310 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8311 // Check if early AF state for trigger exists. If yes, send AF state as
8312 // partial result for better latency.
8313 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8314 pendingRequest.focusStateSent = true;
8315 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8316 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8317 frame_number, fwkEarlyAfState);
8318 }
8319 }
8320 }
8321 }
8322 }
8323 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8324 &mAfTrigger.trigger, 1);
8325 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8326
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008327 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8328 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008329 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008330 int32_t afRegions[REGIONS_TUPLE_COUNT];
8331         // Adjust AF region from sensor output coordinate system to active
8332         // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008333 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8334 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008335
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008336 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008337 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8338 REGIONS_TUPLE_COUNT);
8339 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8340 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008341 hAfRect.left, hAfRect.top, hAfRect.width,
8342 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008343 }
8344
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008345 // AF region confidence
8346 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8347 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8348 }
8349
Thierry Strudel3d639192016-09-09 11:52:26 -07008350 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8351 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8352 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8353 if (NAME_NOT_FOUND != val) {
8354 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8355 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8356 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8357 } else {
8358 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8359 }
8360 }
8361
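    // ANDROID_CONTROL_AE_MODE is derived rather than reported directly: red-eye
    // reduction takes precedence, then the LED/flash mode, then the plain AEC mode
    // (including the external-flash extension).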
8362 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8363 uint32_t aeMode = CAM_AE_MODE_MAX;
8364 int32_t flashMode = CAM_FLASH_MODE_MAX;
8365 int32_t redeye = -1;
8366 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8367 aeMode = *pAeMode;
8368 }
8369 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8370 flashMode = *pFlashMode;
8371 }
8372 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8373 redeye = *pRedeye;
8374 }
8375
8376 if (1 == redeye) {
8377 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8378 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8379 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8380 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8381 flashMode);
8382 if (NAME_NOT_FOUND != val) {
8383 fwk_aeMode = (uint8_t)val;
8384 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8385 } else {
8386 LOGE("Unsupported flash mode %d", flashMode);
8387 }
8388 } else if (aeMode == CAM_AE_MODE_ON) {
8389 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8390 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8391 } else if (aeMode == CAM_AE_MODE_OFF) {
8392 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8393 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008394 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8395 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8396 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008397 } else {
8398 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8399 "flashMode:%d, aeMode:%u!!!",
8400 redeye, flashMode, aeMode);
8401 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008402 if (mInstantAEC) {
8403         // Increment frame index count until a bound is reached for instant AEC.
8404 mInstantAecFrameIdxCount++;
8405 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8406 CAM_INTF_META_AEC_INFO, metadata) {
8407 LOGH("ae_params->settled = %d",ae_params->settled);
8408 // If AEC settled, or if number of frames reached bound value,
8409 // should reset instant AEC.
8410 if (ae_params->settled ||
8411 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8412 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8413 mInstantAEC = false;
8414 mResetInstantAEC = true;
8415 mInstantAecFrameIdxCount = 0;
8416 }
8417 }
8418 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008419 resultMetadata = camMetadata.release();
8420 return resultMetadata;
8421}
8422
8423/*===========================================================================
8424 * FUNCTION : dumpMetadataToFile
8425 *
8426 * DESCRIPTION: Dumps tuning metadata to file system
8427 *
8428 * PARAMETERS :
8429 * @meta : tuning metadata
8430 * @dumpFrameCount : current dump frame count
8431 * @enabled : Enable mask
8432 *
8433 *==========================================================================*/
8434void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8435 uint32_t &dumpFrameCount,
8436 bool enabled,
8437 const char *type,
8438 uint32_t frameNumber)
8439{
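    // The dump uses the same layout as the QCAMERA3_TUNING_META_DATA_BLOB vendor
    // tag: six uint32_t header words (version plus the five section sizes) followed
    // by the raw sensor, VFE, CPP and CAC tuning sections.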
8440 //Some sanity checks
8441 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8442 LOGE("Tuning sensor data size bigger than expected %d: %d",
8443 meta.tuning_sensor_data_size,
8444 TUNING_SENSOR_DATA_MAX);
8445 return;
8446 }
8447
8448 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8449 LOGE("Tuning VFE data size bigger than expected %d: %d",
8450 meta.tuning_vfe_data_size,
8451 TUNING_VFE_DATA_MAX);
8452 return;
8453 }
8454
8455 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8456 LOGE("Tuning CPP data size bigger than expected %d: %d",
8457 meta.tuning_cpp_data_size,
8458 TUNING_CPP_DATA_MAX);
8459 return;
8460 }
8461
8462 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8463 LOGE("Tuning CAC data size bigger than expected %d: %d",
8464 meta.tuning_cac_data_size,
8465 TUNING_CAC_DATA_MAX);
8466 return;
8467 }
8468 //
8469
8470 if(enabled){
8471 char timeBuf[FILENAME_MAX];
8472 char buf[FILENAME_MAX];
8473 memset(buf, 0, sizeof(buf));
8474 memset(timeBuf, 0, sizeof(timeBuf));
8475 time_t current_time;
8476 struct tm * timeinfo;
8477 time (&current_time);
8478 timeinfo = localtime (&current_time);
8479 if (timeinfo != NULL) {
8480 strftime (timeBuf, sizeof(timeBuf),
8481 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8482 }
8483 String8 filePath(timeBuf);
8484 snprintf(buf,
8485 sizeof(buf),
8486 "%dm_%s_%d.bin",
8487 dumpFrameCount,
8488 type,
8489 frameNumber);
8490 filePath.append(buf);
8491 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8492 if (file_fd >= 0) {
8493 ssize_t written_len = 0;
8494 meta.tuning_data_version = TUNING_DATA_VERSION;
8495 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8496 written_len += write(file_fd, data, sizeof(uint32_t));
8497 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8498 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8499 written_len += write(file_fd, data, sizeof(uint32_t));
8500 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8501 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8502 written_len += write(file_fd, data, sizeof(uint32_t));
8503 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8504 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8505 written_len += write(file_fd, data, sizeof(uint32_t));
8506 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8507 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8508 written_len += write(file_fd, data, sizeof(uint32_t));
8509 meta.tuning_mod3_data_size = 0;
8510 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8511 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8512 written_len += write(file_fd, data, sizeof(uint32_t));
8513 size_t total_size = meta.tuning_sensor_data_size;
8514 data = (void *)((uint8_t *)&meta.data);
8515 written_len += write(file_fd, data, total_size);
8516 total_size = meta.tuning_vfe_data_size;
8517 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8518 written_len += write(file_fd, data, total_size);
8519 total_size = meta.tuning_cpp_data_size;
8520 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8521 written_len += write(file_fd, data, total_size);
8522 total_size = meta.tuning_cac_data_size;
8523 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8524 written_len += write(file_fd, data, total_size);
8525 close(file_fd);
8526 }else {
8527 LOGE("fail to open file for metadata dumping");
8528 }
8529 }
8530}
8531
8532/*===========================================================================
8533 * FUNCTION : cleanAndSortStreamInfo
8534 *
8535 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8536 * and sort them such that raw streams are at the end of the list.
8537 * This is a workaround for a camera daemon constraint.
8538 *
8539 * PARAMETERS : None
8540 *
8541 *==========================================================================*/
8542void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8543{
8544 List<stream_info_t *> newStreamInfo;
8545
8546 /*clean up invalid streams*/
8547 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8548 it != mStreamInfo.end();) {
8549 if(((*it)->status) == INVALID){
8550 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8551 delete channel;
8552 free(*it);
8553 it = mStreamInfo.erase(it);
8554 } else {
8555 it++;
8556 }
8557 }
8558
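    // Two-pass stable partition: non-RAW streams keep their relative order at the
    // front of the list, RAW streams are appended at the end.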
8559 // Move preview/video/callback/snapshot streams into newList
8560 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8561 it != mStreamInfo.end();) {
8562 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8563 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8564 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8565 newStreamInfo.push_back(*it);
8566 it = mStreamInfo.erase(it);
8567 } else
8568 it++;
8569 }
8570 // Move raw streams into newList
8571 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8572 it != mStreamInfo.end();) {
8573 newStreamInfo.push_back(*it);
8574 it = mStreamInfo.erase(it);
8575 }
8576
8577 mStreamInfo = newStreamInfo;
8578}
8579
8580/*===========================================================================
8581 * FUNCTION : extractJpegMetadata
8582 *
8583 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8584 * JPEG metadata is cached in HAL, and return as part of capture
8585 * JPEG metadata is cached in HAL, and returned as part of the capture
8586 * result when metadata is returned from the camera daemon.
8587 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8588 * @request: capture request
8589 *
8590 *==========================================================================*/
8591void QCamera3HardwareInterface::extractJpegMetadata(
8592 CameraMetadata& jpegMetadata,
8593 const camera3_capture_request_t *request)
8594{
8595 CameraMetadata frame_settings;
8596 frame_settings = request->settings;
8597
8598 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8599 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8600 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8601 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8602
8603 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8604 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8605 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8606 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8607
8608 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8609 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8610 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8611 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8612
8613 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8614 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8615 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8616 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8617
8618 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8619 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8620 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8621 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8622
8623 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8624 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8625 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8626 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8627
8628 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8629 int32_t thumbnail_size[2];
8630 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8631 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8632 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8633 int32_t orientation =
8634 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008635 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008636 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8637 int32_t temp;
8638 temp = thumbnail_size[0];
8639 thumbnail_size[0] = thumbnail_size[1];
8640 thumbnail_size[1] = temp;
8641 }
8642 }
8643 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8644 thumbnail_size,
8645 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8646 }
8647
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : convertToRegions
8652 *
8653 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8654 *
8655 * PARAMETERS :
8656 * @rect : cam_rect_t struct to convert
8657 * @region : int32_t destination array
8658 * @weight : if we are converting from cam_area_t, weight is valid
8659 * else weight = -1
8660 *
8661 *==========================================================================*/
8662void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8663 int32_t *region, int weight)
8664{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008665 region[FACE_LEFT] = rect.left;
8666 region[FACE_TOP] = rect.top;
8667 region[FACE_RIGHT] = rect.left + rect.width;
8668 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008669 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008670 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008671 }
8672}
8673
8674/*===========================================================================
8675 * FUNCTION : convertFromRegions
8676 *
8677 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8678 *
8679 * PARAMETERS :
8680 * @roi : cam_area_t destination to fill
8681 * @frame_settings : capture request settings containing the region tag
8682 * @tag : metadata tag whose [xmin, ymin, xmax, ymax, weight] array
8683 * is read and converted
8684 *
8685 *==========================================================================*/
8686void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008687 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008688{
Thierry Strudel3d639192016-09-09 11:52:26 -07008689 int32_t x_min = frame_settings.find(tag).data.i32[0];
8690 int32_t y_min = frame_settings.find(tag).data.i32[1];
8691 int32_t x_max = frame_settings.find(tag).data.i32[2];
8692 int32_t y_max = frame_settings.find(tag).data.i32[3];
8693 roi.weight = frame_settings.find(tag).data.i32[4];
8694 roi.rect.left = x_min;
8695 roi.rect.top = y_min;
8696 roi.rect.width = x_max - x_min;
8697 roi.rect.height = y_max - y_min;
8698}
8699
8700/*===========================================================================
8701 * FUNCTION : resetIfNeededROI
8702 *
8703 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8704 * crop region
8705 *
8706 * PARAMETERS :
8707 * @roi : cam_area_t struct to resize
8708 * @scalerCropRegion : cam_crop_region_t region to compare against
8709 *
8710 *
8711 *==========================================================================*/
8712bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8713 const cam_crop_region_t* scalerCropRegion)
8714{
8715 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8716 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8717 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8718 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8719
8720     /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8721      * Without this check, the validation below (whether the ROI lies inside the
8722      * scaler crop region) would fail, the ROI would not be reset, and the
8723      * algorithm would continue to use a stale ROI window.
8724 */
8725 if (roi->weight == 0) {
8726 return true;
8727 }
8728
8729 if ((roi_x_max < scalerCropRegion->left) ||
8730         // right edge of roi window is left of scaler crop's left edge
8731         (roi_y_max < scalerCropRegion->top)  ||
8732         // bottom edge of roi window is above scaler crop's top edge
8733         (roi->rect.left > crop_x_max) ||
8734         // left edge of roi window is beyond (right of) scaler crop's right edge
8735         (roi->rect.top > crop_y_max)){
8736         // top edge of roi window is below scaler crop's bottom edge
8737 return false;
8738 }
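    // The ROI overlaps the scaler crop region; clamp its edges so the ROI lies
    // fully inside the crop.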
8739 if (roi->rect.left < scalerCropRegion->left) {
8740 roi->rect.left = scalerCropRegion->left;
8741 }
8742 if (roi->rect.top < scalerCropRegion->top) {
8743 roi->rect.top = scalerCropRegion->top;
8744 }
8745 if (roi_x_max > crop_x_max) {
8746 roi_x_max = crop_x_max;
8747 }
8748 if (roi_y_max > crop_y_max) {
8749 roi_y_max = crop_y_max;
8750 }
8751 roi->rect.width = roi_x_max - roi->rect.left;
8752 roi->rect.height = roi_y_max - roi->rect.top;
8753 return true;
8754}
8755
8756/*===========================================================================
8757 * FUNCTION : convertLandmarks
8758 *
8759 * DESCRIPTION: helper method to extract the landmarks from face detection info
8760 *
8761 * PARAMETERS :
8762 * @landmark_data : input landmark data to be converted
8763 * @landmarks : int32_t destination array
8764 *
8765 *
8766 *==========================================================================*/
8767void QCamera3HardwareInterface::convertLandmarks(
8768 cam_face_landmarks_info_t landmark_data,
8769 int32_t *landmarks)
8770{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008771 if (landmark_data.is_left_eye_valid) {
8772 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8773 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8774 } else {
8775 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8776 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8777 }
8778
8779 if (landmark_data.is_right_eye_valid) {
8780 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8781 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8782 } else {
8783 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8784 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8785 }
8786
8787 if (landmark_data.is_mouth_valid) {
8788 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8789 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8790 } else {
8791 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8792 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8793 }
8794}
8795
8796/*===========================================================================
8797 * FUNCTION : setInvalidLandmarks
8798 *
8799 * DESCRIPTION: helper method to set invalid landmarks
8800 *
8801 * PARAMETERS :
8802 * @landmarks : int32_t destination array
8803 *
8804 *
8805 *==========================================================================*/
8806void QCamera3HardwareInterface::setInvalidLandmarks(
8807 int32_t *landmarks)
8808{
8809 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8810 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8811 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8812 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8813 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8814 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008815}
8816
8817#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008818
8819/*===========================================================================
8820 * FUNCTION : getCapabilities
8821 *
8822 * DESCRIPTION: query camera capability from back-end
8823 *
8824 * PARAMETERS :
8825 * @ops : mm-interface ops structure
8826 * @cam_handle : camera handle for which we need capability
8827 *
8828 * RETURN : ptr type of capability structure
8829 * capability for success
8830 * NULL for failure
8831 *==========================================================================*/
8832cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8833 uint32_t cam_handle)
8834{
8835 int rc = NO_ERROR;
8836 QCamera3HeapMemory *capabilityHeap = NULL;
8837 cam_capability_t *cap_ptr = NULL;
8838
8839 if (ops == NULL) {
8840 LOGE("Invalid arguments");
8841 return NULL;
8842 }
8843
8844 capabilityHeap = new QCamera3HeapMemory(1);
8845 if (capabilityHeap == NULL) {
8846 LOGE("creation of capabilityHeap failed");
8847 return NULL;
8848 }
8849
8850 /* Allocate memory for capability buffer */
8851 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8852 if(rc != OK) {
8853         LOGE("No memory for capability");
8854 goto allocate_failed;
8855 }
8856
8857 /* Map memory for capability buffer */
8858 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8859
8860 rc = ops->map_buf(cam_handle,
8861 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8862 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8863 if(rc < 0) {
8864 LOGE("failed to map capability buffer");
8865 rc = FAILED_TRANSACTION;
8866 goto map_failed;
8867 }
8868
8869 /* Query Capability */
8870 rc = ops->query_capability(cam_handle);
8871 if(rc < 0) {
8872 LOGE("failed to query capability");
8873 rc = FAILED_TRANSACTION;
8874 goto query_failed;
8875 }
8876
8877 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8878 if (cap_ptr == NULL) {
8879 LOGE("out of memory");
8880 rc = NO_MEMORY;
8881 goto query_failed;
8882 }
8883
8884 memset(cap_ptr, 0, sizeof(cam_capability_t));
8885 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8886
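    // Zero out the analysis-stream padding offsets copied from the backend capability.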
8887 int index;
8888 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8889 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8890 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8891 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8892 }
8893
8894query_failed:
8895 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8896map_failed:
8897 capabilityHeap->deallocate();
8898allocate_failed:
8899 delete capabilityHeap;
8900
8901 if (rc != NO_ERROR) {
8902 return NULL;
8903 } else {
8904 return cap_ptr;
8905 }
8906}
8907
Thierry Strudel3d639192016-09-09 11:52:26 -07008908/*===========================================================================
8909 * FUNCTION : initCapabilities
8910 *
8911 * DESCRIPTION: initialize camera capabilities in static data struct
8912 *
8913 * PARAMETERS :
8914 * @cameraId : camera Id
8915 *
8916 * RETURN : int32_t type of status
8917 * NO_ERROR -- success
8918 * none-zero failure code
8919 *==========================================================================*/
8920int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8921{
8922 int rc = 0;
8923 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008924 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008925
8926 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8927 if (rc) {
8928 LOGE("camera_open failed. rc = %d", rc);
8929 goto open_failed;
8930 }
8931 if (!cameraHandle) {
8932 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8933 goto open_failed;
8934 }
8935
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008936 handle = get_main_camera_handle(cameraHandle->camera_handle);
8937 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8938 if (gCamCapability[cameraId] == NULL) {
8939 rc = FAILED_TRANSACTION;
8940 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008941 }
8942
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008943 gCamCapability[cameraId]->camera_index = cameraId;
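    // For dual-camera sensors, also query the aux camera's capability and keep a
    // separate copy of the main camera capability alongside it.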
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008944 if (is_dual_camera_by_idx(cameraId)) {
8945 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8946 gCamCapability[cameraId]->aux_cam_cap =
8947 getCapabilities(cameraHandle->ops, handle);
8948 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8949 rc = FAILED_TRANSACTION;
8950 free(gCamCapability[cameraId]);
8951 goto failed_op;
8952 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008953
8954 // Copy the main camera capability to main_cam_cap struct
8955 gCamCapability[cameraId]->main_cam_cap =
8956 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8957 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8958 LOGE("out of memory");
8959 rc = NO_MEMORY;
8960 goto failed_op;
8961 }
8962 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8963 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008964 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008965failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008966 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8967 cameraHandle = NULL;
8968open_failed:
8969 return rc;
8970}
8971
8972/*==========================================================================
8973 * FUNCTION   : get3AVersion
8974 *
8975 * DESCRIPTION: get the Q3A S/W version
8976 *
8977 * PARAMETERS :
8978 * @sw_version: Reference of Q3A structure which will hold version info upon
8979 * return
8980 *
8981 * RETURN : None
8982 *
8983 *==========================================================================*/
8984void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8985{
8986 if(gCamCapability[mCameraId])
8987 sw_version = gCamCapability[mCameraId]->q3a_version;
8988 else
8989 LOGE("Capability structure NULL!");
8990}
8991
8992
8993/*===========================================================================
8994 * FUNCTION : initParameters
8995 *
8996 * DESCRIPTION: initialize camera parameters
8997 *
8998 * PARAMETERS :
8999 *
9000 * RETURN : int32_t type of status
9001 * NO_ERROR -- success
9002 *              non-zero failure code
9003 *==========================================================================*/
9004int QCamera3HardwareInterface::initParameters()
9005{
9006 int rc = 0;
9007
9008 //Allocate Set Param Buffer
9009 mParamHeap = new QCamera3HeapMemory(1);
9010 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9011 if(rc != OK) {
9012 rc = NO_MEMORY;
9013 LOGE("Failed to allocate SETPARM Heap memory");
9014 delete mParamHeap;
9015 mParamHeap = NULL;
9016 return rc;
9017 }
9018
9019 //Map memory for parameters buffer
9020 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9021 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9022 mParamHeap->getFd(0),
9023 sizeof(metadata_buffer_t),
9024 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9025 if(rc < 0) {
9026 LOGE("failed to map SETPARM buffer");
9027 rc = FAILED_TRANSACTION;
9028 mParamHeap->deallocate();
9029 delete mParamHeap;
9030 mParamHeap = NULL;
9031 return rc;
9032 }
9033
9034 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9035
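    // mParameters points into the mapped heap buffer, while mPrevParameters is a
    // separately malloc'd buffer owned by this class (freed in deinitParameters).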
9036 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9037 return rc;
9038}
9039
9040/*===========================================================================
9041 * FUNCTION : deinitParameters
9042 *
9043 * DESCRIPTION: de-initialize camera parameters
9044 *
9045 * PARAMETERS :
9046 *
9047 * RETURN : NONE
9048 *==========================================================================*/
9049void QCamera3HardwareInterface::deinitParameters()
9050{
9051 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9052 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9053
9054 mParamHeap->deallocate();
9055 delete mParamHeap;
9056 mParamHeap = NULL;
9057
9058 mParameters = NULL;
9059
9060 free(mPrevParameters);
9061 mPrevParameters = NULL;
9062}
9063
9064/*===========================================================================
9065 * FUNCTION : calcMaxJpegSize
9066 *
9067 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9068 *
9069 * PARAMETERS :
9070 *
9071 * RETURN : max_jpeg_size
9072 *==========================================================================*/
9073size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9074{
9075 size_t max_jpeg_size = 0;
9076 size_t temp_width, temp_height;
9077 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9078 MAX_SIZES_CNT);
9079 for (size_t i = 0; i < count; i++) {
9080 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9081 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9082 if (temp_width * temp_height > max_jpeg_size ) {
9083 max_jpeg_size = temp_width * temp_height;
9084 }
9085 }
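    // Approximate the worst-case JPEG buffer as 1.5x the largest pixel count
    // (a YUV420-sized upper bound) plus the camera3_jpeg_blob_t transport header.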
9086 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9087 return max_jpeg_size;
9088}
9089
9090/*===========================================================================
9091 * FUNCTION : getMaxRawSize
9092 *
9093 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9094 *
9095 * PARAMETERS :
9096 *
9097 * RETURN : Largest supported Raw Dimension
9098 *==========================================================================*/
9099cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9100{
9101 int max_width = 0;
9102 cam_dimension_t maxRawSize;
9103
9104 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9105 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9106 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9107 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9108 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9109 }
9110 }
9111 return maxRawSize;
9112}
9113
9114
9115/*===========================================================================
9116 * FUNCTION : calcMaxJpegDim
9117 *
9118 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9119 *
9120 * PARAMETERS :
9121 *
9122 * RETURN : max_jpeg_dim
9123 *==========================================================================*/
9124cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9125{
9126 cam_dimension_t max_jpeg_dim;
9127 cam_dimension_t curr_jpeg_dim;
9128 max_jpeg_dim.width = 0;
9129 max_jpeg_dim.height = 0;
9130 curr_jpeg_dim.width = 0;
9131 curr_jpeg_dim.height = 0;
9132 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9133 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9134 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9135 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9136 max_jpeg_dim.width * max_jpeg_dim.height ) {
9137 max_jpeg_dim.width = curr_jpeg_dim.width;
9138 max_jpeg_dim.height = curr_jpeg_dim.height;
9139 }
9140 }
9141 return max_jpeg_dim;
9142}
9143
9144/*===========================================================================
9145 * FUNCTION : addStreamConfig
9146 *
9147 * DESCRIPTION: adds the stream configuration to the array
9148 *
9149 * PARAMETERS :
9150 * @available_stream_configs : pointer to stream configuration array
9151 * @scalar_format : scalar format
9152 * @dim : configuration dimension
9153 * @config_type : input or output configuration type
9154 *
9155 * RETURN : NONE
9156 *==========================================================================*/
9157void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9158 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9159{
9160 available_stream_configs.add(scalar_format);
9161 available_stream_configs.add(dim.width);
9162 available_stream_configs.add(dim.height);
9163 available_stream_configs.add(config_type);
9164}
9165
9166/*===========================================================================
9167 * FUNCTION   : supportBurstCapture
9168 *
9169 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9170 *
9171 * PARAMETERS :
9172 * @cameraId : camera Id
9173 *
9174 * RETURN : true if camera supports BURST_CAPTURE
9175 * false otherwise
9176 *==========================================================================*/
9177bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9178{
9179 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9180 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9181 const int32_t highResWidth = 3264;
9182 const int32_t highResHeight = 2448;
9183
9184 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9185 // Maximum resolution images cannot be captured at >= 10fps
9186 // -> not supporting BURST_CAPTURE
9187 return false;
9188 }
9189
9190 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9191 // Maximum resolution images can be captured at >= 20fps
9192 // --> supporting BURST_CAPTURE
9193 return true;
9194 }
9195
9196 // Find the smallest highRes resolution, or largest resolution if there is none
9197 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9198 MAX_SIZES_CNT);
9199 size_t highRes = 0;
9200 while ((highRes + 1 < totalCnt) &&
9201 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9202 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9203 highResWidth * highResHeight)) {
9204 highRes++;
9205 }
9206 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9207 return true;
9208 } else {
9209 return false;
9210 }
9211}
9212
9213/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009214 * FUNCTION : getPDStatIndex
9215 *
9216 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9217 *
9218 * PARAMETERS :
9219 * @caps : camera capabilities
9220 *
9221 * RETURN : int32_t type
9222 * non-negative - on success
9223 * -1 - on failure
9224 *==========================================================================*/
9225int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9226 if (nullptr == caps) {
9227 return -1;
9228 }
9229
9230 uint32_t metaRawCount = caps->meta_raw_channel_count;
9231 int32_t ret = -1;
9232 for (size_t i = 0; i < metaRawCount; i++) {
9233 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9234 ret = i;
9235 break;
9236 }
9237 }
9238
9239 return ret;
9240}
9241
9242/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009243 * FUNCTION : initStaticMetadata
9244 *
9245 * DESCRIPTION: initialize the static metadata
9246 *
9247 * PARAMETERS :
9248 * @cameraId : camera Id
9249 *
9250 * RETURN : int32_t type of status
9251 * 0 -- success
9252 * non-zero failure code
9253 *==========================================================================*/
9254int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9255{
9256 int rc = 0;
9257 CameraMetadata staticInfo;
9258 size_t count = 0;
9259 bool limitedDevice = false;
9260 char prop[PROPERTY_VALUE_MAX];
9261 bool supportBurst = false;
9262
9263 supportBurst = supportBurstCapture(cameraId);
9264
9265    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9266     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9267     * advertised as a limited device */
9268 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9269 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9270 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9271 !supportBurst;
9272
9273 uint8_t supportedHwLvl = limitedDevice ?
9274 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009275#ifndef USE_HAL_3_3
9276 // LEVEL_3 - This device will support level 3.
9277 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9278#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009279 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009280#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009281
9282 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9283 &supportedHwLvl, 1);
9284
9285 bool facingBack = false;
9286 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9287 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9288 facingBack = true;
9289 }
9290 /*HAL 3 only*/
9291 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9292 &gCamCapability[cameraId]->min_focus_distance, 1);
9293
9294 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9295 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9296
9297    /* Should be using the list of focal lengths, but the sensor doesn't provide that info yet */
9298 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9299 &gCamCapability[cameraId]->focal_length,
9300 1);
9301
9302 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9303 gCamCapability[cameraId]->apertures,
9304 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9305
9306 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9307 gCamCapability[cameraId]->filter_densities,
9308 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9309
9310
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009311 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9312 size_t mode_count =
9313 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9314 for (size_t i = 0; i < mode_count; i++) {
9315 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9316 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009317 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009318 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009319
9320 int32_t lens_shading_map_size[] = {
9321 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9322 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9323 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9324 lens_shading_map_size,
9325 sizeof(lens_shading_map_size)/sizeof(int32_t));
9326
9327 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9328 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9329
9330 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9331 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9332
9333 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9334 &gCamCapability[cameraId]->max_frame_duration, 1);
9335
9336 camera_metadata_rational baseGainFactor = {
9337 gCamCapability[cameraId]->base_gain_factor.numerator,
9338 gCamCapability[cameraId]->base_gain_factor.denominator};
9339 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9340 &baseGainFactor, 1);
9341
9342 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9343 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9344
9345 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9346 gCamCapability[cameraId]->pixel_array_size.height};
9347 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9348 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9349
9350 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9351 gCamCapability[cameraId]->active_array_size.top,
9352 gCamCapability[cameraId]->active_array_size.width,
9353 gCamCapability[cameraId]->active_array_size.height};
9354 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9355 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9356
9357 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9358 &gCamCapability[cameraId]->white_level, 1);
9359
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009360 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9361 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9362 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009363 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009364 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009365
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009366#ifndef USE_HAL_3_3
9367 bool hasBlackRegions = false;
9368 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9369 LOGW("black_region_count: %d is bounded to %d",
9370 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9371 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9372 }
9373 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9374 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9375 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9376 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9377 }
9378 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9379 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9380 hasBlackRegions = true;
9381 }
9382#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9384 &gCamCapability[cameraId]->flash_charge_duration, 1);
9385
9386 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9387 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9388
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009389 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9390 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9391 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9393 &timestampSource, 1);
9394
Thierry Strudel54dc9782017-02-15 12:12:10 -08009395 //update histogram vendor data
9396 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009397 &gCamCapability[cameraId]->histogram_size, 1);
9398
Thierry Strudel54dc9782017-02-15 12:12:10 -08009399 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009400 &gCamCapability[cameraId]->max_histogram_count, 1);
9401
Shuzhen Wang14415f52016-11-16 18:26:18 -08009402 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9403    //so that the app can request fewer bins than the maximum supported.
9404 std::vector<int32_t> histBins;
9405 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9406 histBins.push_back(maxHistBins);
9407 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9408 (maxHistBins & 0x1) == 0) {
9409 histBins.push_back(maxHistBins >> 1);
9410 maxHistBins >>= 1;
9411 }
9412 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9413 histBins.data(), histBins.size());
9414
Thierry Strudel3d639192016-09-09 11:52:26 -07009415 int32_t sharpness_map_size[] = {
9416 gCamCapability[cameraId]->sharpness_map_size.width,
9417 gCamCapability[cameraId]->sharpness_map_size.height};
9418
9419 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9420 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9421
9422 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9423 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9424
Emilian Peev0f3c3162017-03-15 12:57:46 +00009425 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9426 if (0 <= indexPD) {
9427 // Advertise PD stats data as part of the Depth capabilities
9428 int32_t depthWidth =
9429 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9430 int32_t depthHeight =
9431 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009432 int32_t depthStride =
9433 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009434 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9435 assert(0 < depthSamplesCount);
9436 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9437 &depthSamplesCount, 1);
9438
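        // PD data is advertised both as a RAW16 output at the sensor-reported PD
        // dimensions and as a BLOB output carrying depthSamplesCount point samples.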
9439 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9440 depthHeight,
9441 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9442 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9443 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9444 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9445 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9446
9447 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9448 depthHeight, 33333333,
9449 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9450 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9451 depthMinDuration,
9452 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9453
9454 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9455 depthHeight, 0,
9456 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9457 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9458 depthStallDuration,
9459 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9460
9461 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9462 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009463
9464 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9465 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9466 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009467 }
9468
Thierry Strudel3d639192016-09-09 11:52:26 -07009469 int32_t scalar_formats[] = {
9470 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9471 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9472 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9473 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9474 HAL_PIXEL_FORMAT_RAW10,
9475 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009476 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9477 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9478 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009479
9480 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9481 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9482 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9483 count, MAX_SIZES_CNT, available_processed_sizes);
9484 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9485 available_processed_sizes, count * 2);
9486
9487 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9488 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9489 makeTable(gCamCapability[cameraId]->raw_dim,
9490 count, MAX_SIZES_CNT, available_raw_sizes);
9491 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9492 available_raw_sizes, count * 2);
9493
9494 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9495 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9496 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9497 count, MAX_SIZES_CNT, available_fps_ranges);
9498 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9499 available_fps_ranges, count * 2);
9500
9501 camera_metadata_rational exposureCompensationStep = {
9502 gCamCapability[cameraId]->exp_compensation_step.numerator,
9503 gCamCapability[cameraId]->exp_compensation_step.denominator};
9504 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9505 &exposureCompensationStep, 1);
9506
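    // Advertise video stabilization ON only for the back camera, when the EIS
    // property is enabled and the backend reports EIS 2.0 or 3.0 support.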
9507 Vector<uint8_t> availableVstabModes;
9508 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9509 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009510 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009511 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009512 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009513 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009514 count = IS_TYPE_MAX;
9515 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9516 for (size_t i = 0; i < count; i++) {
9517 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9518 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9519 eisSupported = true;
9520 break;
9521 }
9522 }
9523 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009524 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9525 }
9526 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9527 availableVstabModes.array(), availableVstabModes.size());
9528
9529 /*HAL 1 and HAL 3 common*/
9530 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9531 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9532 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009533 // Cap the max zoom to the max preferred value
9534 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009535 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9536 &maxZoom, 1);
9537
9538 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9539 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9540
9541 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9542 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9543 max3aRegions[2] = 0; /* AF not supported */
9544 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9545 max3aRegions, 3);
9546
9547 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9548 memset(prop, 0, sizeof(prop));
9549 property_get("persist.camera.facedetect", prop, "1");
9550 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9551 LOGD("Support face detection mode: %d",
9552 supportedFaceDetectMode);
9553
9554 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009555    /* support mode should be OFF if the max number of faces is 0 */
9556 if (maxFaces <= 0) {
9557 supportedFaceDetectMode = 0;
9558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009559 Vector<uint8_t> availableFaceDetectModes;
9560 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9561 if (supportedFaceDetectMode == 1) {
9562 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9563 } else if (supportedFaceDetectMode == 2) {
9564 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9565 } else if (supportedFaceDetectMode == 3) {
9566 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9567 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9568 } else {
9569 maxFaces = 0;
9570 }
9571 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9572 availableFaceDetectModes.array(),
9573 availableFaceDetectModes.size());
9574 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9575 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009576 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9577 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9578 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009579
9580 int32_t exposureCompensationRange[] = {
9581 gCamCapability[cameraId]->exposure_compensation_min,
9582 gCamCapability[cameraId]->exposure_compensation_max};
9583 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9584 exposureCompensationRange,
9585 sizeof(exposureCompensationRange)/sizeof(int32_t));
9586
9587 uint8_t lensFacing = (facingBack) ?
9588 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9589 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9590
9591 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9592 available_thumbnail_sizes,
9593 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9594
9595    /* all picture sizes will be combined into this tag */
9596 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9597 /*android.scaler.availableStreamConfigurations*/
9598 Vector<int32_t> available_stream_configs;
9599 cam_dimension_t active_array_dim;
9600 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9601 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009602
9603    /* Advertise the list of supported input dimensions based on the property below.
9604       By default all sizes up to 5MP will be advertised.
9605 Note that the setprop resolution format should be WxH.
9606 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9607 To list all supported sizes, setprop needs to be set with "0x0" */
9608 cam_dimension_t minInputSize = {2592,1944}; //5MP
9609 memset(prop, 0, sizeof(prop));
9610 property_get("persist.camera.input.minsize", prop, "2592x1944");
9611 if (strlen(prop) > 0) {
9612 char *saveptr = NULL;
9613 char *token = strtok_r(prop, "x", &saveptr);
9614 if (token != NULL) {
9615 minInputSize.width = atoi(token);
9616 }
9617 token = strtok_r(NULL, "x", &saveptr);
9618 if (token != NULL) {
9619 minInputSize.height = atoi(token);
9620 }
9621 }
9622
Thierry Strudel3d639192016-09-09 11:52:26 -07009623    /* Add input/output stream configurations for each scalar format */
9624 for (size_t j = 0; j < scalar_formats_count; j++) {
9625 switch (scalar_formats[j]) {
9626 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9627 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9628 case HAL_PIXEL_FORMAT_RAW10:
9629 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9630 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9631 addStreamConfig(available_stream_configs, scalar_formats[j],
9632 gCamCapability[cameraId]->raw_dim[i],
9633 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9634 }
9635 break;
9636 case HAL_PIXEL_FORMAT_BLOB:
9637 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9638 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9639 addStreamConfig(available_stream_configs, scalar_formats[j],
9640 gCamCapability[cameraId]->picture_sizes_tbl[i],
9641 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9642 }
9643 break;
9644 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9645 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9646 default:
9647 cam_dimension_t largest_picture_size;
9648 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9649 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9650 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9651 addStreamConfig(available_stream_configs, scalar_formats[j],
9652 gCamCapability[cameraId]->picture_sizes_tbl[i],
9653 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009654                /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009655 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9656 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
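                    // Only the largest picture size (table entry 0) that is at least
                    // as large as the configured minimum in either dimension is
                    // advertised as an input stream.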
Thierry Strudel2896d122017-02-23 19:18:03 -08009657 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9658 >= minInputSize.width) || (gCamCapability[cameraId]->
9659 picture_sizes_tbl[i].height >= minInputSize.height)) {
9660 addStreamConfig(available_stream_configs, scalar_formats[j],
9661 gCamCapability[cameraId]->picture_sizes_tbl[i],
9662 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9663 }
9664 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009665 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009666
Thierry Strudel3d639192016-09-09 11:52:26 -07009667 break;
9668 }
9669 }
9670
9671 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9672 available_stream_configs.array(), available_stream_configs.size());
9673 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9674 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9675
9676 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9677 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9678
9679 /* android.scaler.availableMinFrameDurations */
9680 Vector<int64_t> available_min_durations;
9681 for (size_t j = 0; j < scalar_formats_count; j++) {
9682 switch (scalar_formats[j]) {
9683 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9684 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9685 case HAL_PIXEL_FORMAT_RAW10:
9686 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9687 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9688 available_min_durations.add(scalar_formats[j]);
9689 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9690 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9691 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9692 }
9693 break;
9694 default:
9695 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9696 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9697 available_min_durations.add(scalar_formats[j]);
9698 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9699 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9700 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9701 }
9702 break;
9703 }
9704 }
9705 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9706 available_min_durations.array(), available_min_durations.size());
9707
9708 Vector<int32_t> available_hfr_configs;
9709 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9710 int32_t fps = 0;
9711 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9712 case CAM_HFR_MODE_60FPS:
9713 fps = 60;
9714 break;
9715 case CAM_HFR_MODE_90FPS:
9716 fps = 90;
9717 break;
9718 case CAM_HFR_MODE_120FPS:
9719 fps = 120;
9720 break;
9721 case CAM_HFR_MODE_150FPS:
9722 fps = 150;
9723 break;
9724 case CAM_HFR_MODE_180FPS:
9725 fps = 180;
9726 break;
9727 case CAM_HFR_MODE_210FPS:
9728 fps = 210;
9729 break;
9730 case CAM_HFR_MODE_240FPS:
9731 fps = 240;
9732 break;
9733 case CAM_HFR_MODE_480FPS:
9734 fps = 480;
9735 break;
9736 case CAM_HFR_MODE_OFF:
9737 case CAM_HFR_MODE_MAX:
9738 default:
9739 break;
9740 }
9741
9742 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9743 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9744 /* For each HFR frame rate, need to advertise one variable fps range
9745 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9746 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9747 * set by the app. When video recording is started, [120, 120] is
9748 * set. This way sensor configuration does not change when recording
9749 * is started */
9750
9751 /* (width, height, fps_min, fps_max, batch_size_max) */
9752 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9753 j < MAX_SIZES_CNT; j++) {
9754 available_hfr_configs.add(
9755 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9756 available_hfr_configs.add(
9757 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9758 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9759 available_hfr_configs.add(fps);
9760 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9761
9762 /* (width, height, fps_min, fps_max, batch_size_max) */
9763 available_hfr_configs.add(
9764 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9765 available_hfr_configs.add(
9766 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9767 available_hfr_configs.add(fps);
9768 available_hfr_configs.add(fps);
9769 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9770 }
9771 }
9772 }
9773 //Advertise HFR capability only if the property is set
9774 memset(prop, 0, sizeof(prop));
9775 property_get("persist.camera.hal3hfr.enable", prop, "1");
9776 uint8_t hfrEnable = (uint8_t)atoi(prop);
9777
9778 if(hfrEnable && available_hfr_configs.array()) {
9779 staticInfo.update(
9780 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9781 available_hfr_configs.array(), available_hfr_configs.size());
9782 }
9783
9784 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9785 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9786 &max_jpeg_size, 1);
9787
9788 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9789 size_t size = 0;
9790 count = CAM_EFFECT_MODE_MAX;
9791 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9792 for (size_t i = 0; i < count; i++) {
9793 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9794 gCamCapability[cameraId]->supported_effects[i]);
9795 if (NAME_NOT_FOUND != val) {
9796 avail_effects[size] = (uint8_t)val;
9797 size++;
9798 }
9799 }
9800 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9801 avail_effects,
9802 size);
9803
9804 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9805 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9806 size_t supported_scene_modes_cnt = 0;
9807 count = CAM_SCENE_MODE_MAX;
9808 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9809 for (size_t i = 0; i < count; i++) {
9810 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9811 CAM_SCENE_MODE_OFF) {
9812 int val = lookupFwkName(SCENE_MODES_MAP,
9813 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9814 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009815
Thierry Strudel3d639192016-09-09 11:52:26 -07009816 if (NAME_NOT_FOUND != val) {
9817 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9818 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9819 supported_scene_modes_cnt++;
9820 }
9821 }
9822 }
9823 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9824 avail_scene_modes,
9825 supported_scene_modes_cnt);
9826
9827 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9828 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9829 supported_scene_modes_cnt,
9830 CAM_SCENE_MODE_MAX,
9831 scene_mode_overrides,
9832 supported_indexes,
9833 cameraId);
9834
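    // If the backend reports no scene modes, fall back to advertising only DISABLED.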
9835 if (supported_scene_modes_cnt == 0) {
9836 supported_scene_modes_cnt = 1;
9837 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9838 }
9839
9840 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9841 scene_mode_overrides, supported_scene_modes_cnt * 3);
9842
9843 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9844 ANDROID_CONTROL_MODE_AUTO,
9845 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9846 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9847 available_control_modes,
9848 3);
9849
9850 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9851 size = 0;
9852 count = CAM_ANTIBANDING_MODE_MAX;
9853 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9854 for (size_t i = 0; i < count; i++) {
9855 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9856 gCamCapability[cameraId]->supported_antibandings[i]);
9857 if (NAME_NOT_FOUND != val) {
9858 avail_antibanding_modes[size] = (uint8_t)val;
9859 size++;
9860 }
9861
9862 }
9863 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9864 avail_antibanding_modes,
9865 size);
9866
9867 uint8_t avail_abberation_modes[] = {
9868 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9869 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9870 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9871 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9872 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9873 if (0 == count) {
9874        // If no aberration correction modes are available for a device, advertise only the OFF mode
9875 size = 1;
9876 } else {
9877        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9878        // So, advertise all 3 modes if at least one mode is supported, as per the
9879 // new M requirement
9880 size = 3;
9881 }
9882 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9883 avail_abberation_modes,
9884 size);
9885
9886 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9887 size = 0;
9888 count = CAM_FOCUS_MODE_MAX;
9889 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9890 for (size_t i = 0; i < count; i++) {
9891 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9892 gCamCapability[cameraId]->supported_focus_modes[i]);
9893 if (NAME_NOT_FOUND != val) {
9894 avail_af_modes[size] = (uint8_t)val;
9895 size++;
9896 }
9897 }
9898 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9899 avail_af_modes,
9900 size);
9901
9902 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9903 size = 0;
9904 count = CAM_WB_MODE_MAX;
9905 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9906 for (size_t i = 0; i < count; i++) {
9907 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9908 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9909 gCamCapability[cameraId]->supported_white_balances[i]);
9910 if (NAME_NOT_FOUND != val) {
9911 avail_awb_modes[size] = (uint8_t)val;
9912 size++;
9913 }
9914 }
9915 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9916 avail_awb_modes,
9917 size);
9918
9919 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9920 count = CAM_FLASH_FIRING_LEVEL_MAX;
9921 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9922 count);
9923 for (size_t i = 0; i < count; i++) {
9924 available_flash_levels[i] =
9925 gCamCapability[cameraId]->supported_firing_levels[i];
9926 }
9927 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9928 available_flash_levels, count);
9929
9930 uint8_t flashAvailable;
9931 if (gCamCapability[cameraId]->flash_available)
9932 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9933 else
9934 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9935 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9936 &flashAvailable, 1);
9937
9938 Vector<uint8_t> avail_ae_modes;
9939 count = CAM_AE_MODE_MAX;
9940 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9941 for (size_t i = 0; i < count; i++) {
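        // Remap the vendor external-flash AE mode onto the experimental framework
        // enum before advertising it.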
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009942 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9943 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9944 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9945 }
9946 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009947 }
9948 if (flashAvailable) {
9949 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9950 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9951 }
9952 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9953 avail_ae_modes.array(),
9954 avail_ae_modes.size());
9955
9956 int32_t sensitivity_range[2];
9957 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9958 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9959 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9960 sensitivity_range,
9961 sizeof(sensitivity_range) / sizeof(int32_t));
9962
9963 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9964 &gCamCapability[cameraId]->max_analog_sensitivity,
9965 1);
9966
9967 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9968 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9969 &sensor_orientation,
9970 1);
9971
9972 int32_t max_output_streams[] = {
9973 MAX_STALLING_STREAMS,
9974 MAX_PROCESSED_STREAMS,
9975 MAX_RAW_STREAMS};
9976 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9977 max_output_streams,
9978 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9979
9980 uint8_t avail_leds = 0;
9981 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9982 &avail_leds, 0);
9983
9984 uint8_t focus_dist_calibrated;
9985 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9986 gCamCapability[cameraId]->focus_dist_calibrated);
9987 if (NAME_NOT_FOUND != val) {
9988 focus_dist_calibrated = (uint8_t)val;
9989 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9990 &focus_dist_calibrated, 1);
9991 }
9992
9993 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9994 size = 0;
9995 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9996 MAX_TEST_PATTERN_CNT);
9997 for (size_t i = 0; i < count; i++) {
9998 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9999 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10000 if (NAME_NOT_FOUND != testpatternMode) {
10001 avail_testpattern_modes[size] = testpatternMode;
10002 size++;
10003 }
10004 }
10005 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10006 avail_testpattern_modes,
10007 size);
10008
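    // Advertised pipeline depth: maximum in-flight requests plus the empty-pipeline
    // and frame-skip delays.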
10009 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10010 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10011 &max_pipeline_depth,
10012 1);
10013
10014 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10015 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10016 &partial_result_count,
10017 1);
10018
10019 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10020 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10021
10022 Vector<uint8_t> available_capabilities;
10023 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10025 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10026 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10027 if (supportBurst) {
10028 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10029 }
10030 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10031 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10032 if (hfrEnable && available_hfr_configs.array()) {
10033 available_capabilities.add(
10034 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10035 }
10036
10037 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10038 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10039 }
10040 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10041 available_capabilities.array(),
10042 available_capabilities.size());
10043
10044    // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10045    // The assumption is that all Bayer cameras support MANUAL_SENSOR.
10046 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10047 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10048
10049 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10050 &aeLockAvailable, 1);
10051
10052    // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10053    // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10054 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10055 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10056
10057 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10058 &awbLockAvailable, 1);
10059
10060 int32_t max_input_streams = 1;
10061 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10062 &max_input_streams,
10063 1);
10064
10065 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10066 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10067 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10068 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10069 HAL_PIXEL_FORMAT_YCbCr_420_888};
10070 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10071 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10072
10073 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10074 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10075 &max_latency,
10076 1);
10077
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010078#ifndef USE_HAL_3_3
10079 int32_t isp_sensitivity_range[2];
10080 isp_sensitivity_range[0] =
10081 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10082 isp_sensitivity_range[1] =
10083 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10084 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10085 isp_sensitivity_range,
10086 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10087#endif
10088
Thierry Strudel3d639192016-09-09 11:52:26 -070010089 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10090 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10091 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10092 available_hot_pixel_modes,
10093 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10094
10095 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10096 ANDROID_SHADING_MODE_FAST,
10097 ANDROID_SHADING_MODE_HIGH_QUALITY};
10098 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10099 available_shading_modes,
10100 3);
10101
10102 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10103 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10104 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10105 available_lens_shading_map_modes,
10106 2);
10107
10108 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10109 ANDROID_EDGE_MODE_FAST,
10110 ANDROID_EDGE_MODE_HIGH_QUALITY,
10111 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10112 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10113 available_edge_modes,
10114 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10115
10116 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10117 ANDROID_NOISE_REDUCTION_MODE_FAST,
10118 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10119 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10120 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10121 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10122 available_noise_red_modes,
10123 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10124
10125 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10126 ANDROID_TONEMAP_MODE_FAST,
10127 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10128 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10129 available_tonemap_modes,
10130 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10131
10132 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10133 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10134 available_hot_pixel_map_modes,
10135 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10136
10137 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10138 gCamCapability[cameraId]->reference_illuminant1);
10139 if (NAME_NOT_FOUND != val) {
10140 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10141 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10142 }
10143
10144 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10145 gCamCapability[cameraId]->reference_illuminant2);
10146 if (NAME_NOT_FOUND != val) {
10147 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10148 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10149 }
10150
10151 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10152 (void *)gCamCapability[cameraId]->forward_matrix1,
10153 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10154
10155 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10156 (void *)gCamCapability[cameraId]->forward_matrix2,
10157 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10158
10159 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10160 (void *)gCamCapability[cameraId]->color_transform1,
10161 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10162
10163 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10164 (void *)gCamCapability[cameraId]->color_transform2,
10165 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10166
10167 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10168 (void *)gCamCapability[cameraId]->calibration_transform1,
10169 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10170
10171 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10172 (void *)gCamCapability[cameraId]->calibration_transform2,
10173 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10174
10175 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10176 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10177 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10178 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10179 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10180 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10181 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10182 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10183 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10184 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10185 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10186 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10187 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10188 ANDROID_JPEG_GPS_COORDINATES,
10189 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10190 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10191 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10192 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10193 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10194 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10195 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10196 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10197 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10198 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010199#ifndef USE_HAL_3_3
10200 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10201#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010202 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010203 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010204 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10205 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010206 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010207 /* DevCamDebug metadata request_keys_basic */
10208 DEVCAMDEBUG_META_ENABLE,
10209 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010210 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010211 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010212 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010213 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010214 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010215 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010216
10217 size_t request_keys_cnt =
10218 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10219 Vector<int32_t> available_request_keys;
10220 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10221 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10222 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10223 }
10224
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010225 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010226 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010227 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010228 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010229 }
10230
Thierry Strudel3d639192016-09-09 11:52:26 -070010231 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10232 available_request_keys.array(), available_request_keys.size());
10233
10234 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10235 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10236 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10237 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10238 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10239 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10240 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10241 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10242 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10243 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10244 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10245 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10246 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10247 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10248 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10249 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10250 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010251 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010252 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10253 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10254 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010255 ANDROID_STATISTICS_FACE_SCORES,
10256#ifndef USE_HAL_3_3
10257 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10258#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010259 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010260 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010261 // DevCamDebug metadata result_keys_basic
10262 DEVCAMDEBUG_META_ENABLE,
10263 // DevCamDebug metadata result_keys AF
10264 DEVCAMDEBUG_AF_LENS_POSITION,
10265 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10266 DEVCAMDEBUG_AF_TOF_DISTANCE,
10267 DEVCAMDEBUG_AF_LUMA,
10268 DEVCAMDEBUG_AF_HAF_STATE,
10269 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10270 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10271 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10272 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10273 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10274 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10275 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10276 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10277 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10278 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10279 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10280 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10281 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10282 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10283 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10284 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10285 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10286 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10287 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10288 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10289 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10290 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10291 // DevCamDebug metadata result_keys AEC
10292 DEVCAMDEBUG_AEC_TARGET_LUMA,
10293 DEVCAMDEBUG_AEC_COMP_LUMA,
10294 DEVCAMDEBUG_AEC_AVG_LUMA,
10295 DEVCAMDEBUG_AEC_CUR_LUMA,
10296 DEVCAMDEBUG_AEC_LINECOUNT,
10297 DEVCAMDEBUG_AEC_REAL_GAIN,
10298 DEVCAMDEBUG_AEC_EXP_INDEX,
10299 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010300 // DevCamDebug metadata result_keys zzHDR
10301 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10302 DEVCAMDEBUG_AEC_L_LINECOUNT,
10303 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10304 DEVCAMDEBUG_AEC_S_LINECOUNT,
10305 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10306 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10307 // DevCamDebug metadata result_keys ADRC
10308 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10309 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10310 DEVCAMDEBUG_AEC_GTM_RATIO,
10311 DEVCAMDEBUG_AEC_LTM_RATIO,
10312 DEVCAMDEBUG_AEC_LA_RATIO,
10313 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010314 // DevCamDebug metadata result_keys AEC MOTION
10315 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10316 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10317 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010318 // DevCamDebug metadata result_keys AWB
10319 DEVCAMDEBUG_AWB_R_GAIN,
10320 DEVCAMDEBUG_AWB_G_GAIN,
10321 DEVCAMDEBUG_AWB_B_GAIN,
10322 DEVCAMDEBUG_AWB_CCT,
10323 DEVCAMDEBUG_AWB_DECISION,
10324 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010325 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10326 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10327 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010328 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010329 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010330 };
10331
Thierry Strudel3d639192016-09-09 11:52:26 -070010332 size_t result_keys_cnt =
10333 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10334
10335 Vector<int32_t> available_result_keys;
10336 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10337 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10338 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10339 }
10340 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10341 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10342 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10343 }
10344 if (supportedFaceDetectMode == 1) {
10345 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10346 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10347 } else if ((supportedFaceDetectMode == 2) ||
10348 (supportedFaceDetectMode == 3)) {
10349 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10350 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10351 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010352#ifndef USE_HAL_3_3
10353 if (hasBlackRegions) {
10354 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10355 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10356 }
10357#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010358
10359 if (gExposeEnableZslKey) {
10360 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010361 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010362 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10363 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010364 }
10365
Thierry Strudel3d639192016-09-09 11:52:26 -070010366 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10367 available_result_keys.array(), available_result_keys.size());
10368
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010369 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010370 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10371 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10372 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10373 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10374 ANDROID_SCALER_CROPPING_TYPE,
10375 ANDROID_SYNC_MAX_LATENCY,
10376 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10377 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10378 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10379 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10380 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10381 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10382 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10383 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10384 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10385 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10386 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10387 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10388 ANDROID_LENS_FACING,
10389 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10390 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10391 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10392 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10393 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10394 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10395 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10396 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10397 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10398 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10399 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10400 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10401 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10402 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10403 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10404 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10405 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10406 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10407 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10408 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010409 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010410 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10411 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10412 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10413 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10414 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10415 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10416 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10417 ANDROID_CONTROL_AVAILABLE_MODES,
10418 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10419 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10420 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10421 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010422 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10423#ifndef USE_HAL_3_3
10424 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10425 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10426#endif
10427 };
10428
10429 Vector<int32_t> available_characteristics_keys;
10430 available_characteristics_keys.appendArray(characteristics_keys_basic,
10431 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10432#ifndef USE_HAL_3_3
10433 if (hasBlackRegions) {
10434 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10435 }
10436#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010437
10438 if (0 <= indexPD) {
10439 int32_t depthKeys[] = {
10440 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10441 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10442 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10443 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10444 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10445 };
10446 available_characteristics_keys.appendArray(depthKeys,
10447 sizeof(depthKeys) / sizeof(depthKeys[0]));
10448 }
10449
Thierry Strudel3d639192016-09-09 11:52:26 -070010450 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010451 available_characteristics_keys.array(),
10452 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010453
10454 /*available stall durations depend on the hw + sw and will be different for different devices */
10455 /*have to add for raw after implementation*/
10456 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10457 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10458
10459 Vector<int64_t> available_stall_durations;
10460 for (uint32_t j = 0; j < stall_formats_count; j++) {
10461 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10462 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10463 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10464 available_stall_durations.add(stall_formats[j]);
10465 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10466 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10467 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10468 }
10469 } else {
10470 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10471 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10472 available_stall_durations.add(stall_formats[j]);
10473 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10474 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10475 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10476 }
10477 }
10478 }
10479 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10480 available_stall_durations.array(),
10481 available_stall_durations.size());
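 // Layout note (a sketch of the resulting metadata, not code from the original HAL):
 // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS is a flat int64 list of
 // (format, width, height, stall_duration_ns) quadruples. For example, a hypothetical
 // entry {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 300000000} would advertise a ~300 ms stall
 // for full-resolution JPEG capture; the actual values come from the
 // jpeg_stall_durations / raw16_stall_durations tables used above.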
10482
10483 //QCAMERA3_OPAQUE_RAW
10484 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10485 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10486 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10487 case LEGACY_RAW:
10488 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10489 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10490 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10491 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10492 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10493 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10494 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10495 break;
10496 case MIPI_RAW:
10497 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10498 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10499 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10500 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10501 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10502 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10503 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10504 break;
10505 default:
10506 LOGE("unknown opaque_raw_format %d",
10507 gCamCapability[cameraId]->opaque_raw_fmt);
10508 break;
10509 }
10510 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10511
10512 Vector<int32_t> strides;
10513 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10514 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10515 cam_stream_buf_plane_info_t buf_planes;
10516 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10517 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10518 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10519 &gCamCapability[cameraId]->padding_info, &buf_planes);
10520 strides.add(buf_planes.plane_info.mp[0].stride);
10521 }
10522 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10523 strides.size());
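 // Layout note (illustrative, derived from the loop above): QCAMERA3_OPAQUE_RAW_STRIDES is a
 // flat int32 list of (width, height, stride) triples, one per supported RAW dimension, where
 // the stride is the plane-0 stride computed by mm_stream_calc_offset_raw() for the opaque
 // RAW format selected above.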
10524
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010525 //TBD: remove the following line once backend advertises zzHDR in feature mask
10526 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010527 //Video HDR default
10528 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10529 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010530 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010531 int32_t vhdr_mode[] = {
10532 QCAMERA3_VIDEO_HDR_MODE_OFF,
10533 QCAMERA3_VIDEO_HDR_MODE_ON};
10534
10535 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10536 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10537 vhdr_mode, vhdr_mode_count);
10538 }
10539
Thierry Strudel3d639192016-09-09 11:52:26 -070010540 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10541 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10542 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10543
10544 uint8_t isMonoOnly =
10545 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10546 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10547 &isMonoOnly, 1);
10548
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010549#ifndef USE_HAL_3_3
10550 Vector<int32_t> opaque_size;
10551 for (size_t j = 0; j < scalar_formats_count; j++) {
10552 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10553 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10554 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10555 cam_stream_buf_plane_info_t buf_planes;
10556
10557 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10558 &gCamCapability[cameraId]->padding_info, &buf_planes);
10559
10560 if (rc == 0) {
10561 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10562 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10563 opaque_size.add(buf_planes.plane_info.frame_len);
10564 } else {
10565 LOGE("raw frame calculation failed!");
10566 }
10567 }
10568 }
10569 }
10570
10571 if ((opaque_size.size() > 0) &&
10572 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10573 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10574 else
10575 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10576#endif
10577
Thierry Strudel04e026f2016-10-10 11:27:36 -070010578 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10579 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10580 size = 0;
10581 count = CAM_IR_MODE_MAX;
10582 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10583 for (size_t i = 0; i < count; i++) {
10584 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10585 gCamCapability[cameraId]->supported_ir_modes[i]);
10586 if (NAME_NOT_FOUND != val) {
10587 avail_ir_modes[size] = (int32_t)val;
10588 size++;
10589 }
10590 }
10591 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10592 avail_ir_modes, size);
10593 }
10594
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010595 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10596 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10597 size = 0;
10598 count = CAM_AEC_CONVERGENCE_MAX;
10599 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10600 for (size_t i = 0; i < count; i++) {
10601 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10602 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10603 if (NAME_NOT_FOUND != val) {
10604 available_instant_aec_modes[size] = (int32_t)val;
10605 size++;
10606 }
10607 }
10608 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10609 available_instant_aec_modes, size);
10610 }
10611
Thierry Strudel54dc9782017-02-15 12:12:10 -080010612 int32_t sharpness_range[] = {
10613 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10614 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10615 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10616
10617 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10618 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10619 size = 0;
10620 count = CAM_BINNING_CORRECTION_MODE_MAX;
10621 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10622 for (size_t i = 0; i < count; i++) {
10623 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10624 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10625 gCamCapability[cameraId]->supported_binning_modes[i]);
10626 if (NAME_NOT_FOUND != val) {
10627 avail_binning_modes[size] = (int32_t)val;
10628 size++;
10629 }
10630 }
10631 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10632 avail_binning_modes, size);
10633 }
10634
10635 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10636 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10637 size = 0;
10638 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10639 for (size_t i = 0; i < count; i++) {
10640 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10641 gCamCapability[cameraId]->supported_aec_modes[i]);
10642 if (NAME_NOT_FOUND != val)
10643 available_aec_modes[size++] = val;
10644 }
10645 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10646 available_aec_modes, size);
10647 }
10648
10649 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10650 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10651 size = 0;
10652 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10653 for (size_t i = 0; i < count; i++) {
10654 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10655 gCamCapability[cameraId]->supported_iso_modes[i]);
10656 if (NAME_NOT_FOUND != val)
10657 available_iso_modes[size++] = val;
10658 }
10659 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10660 available_iso_modes, size);
10661 }
10662
10663 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010664 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010665 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10666 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10667 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10668
10669 int32_t available_saturation_range[4];
10670 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10671 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10672 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10673 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10674 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10675 available_saturation_range, 4);
10676
10677 uint8_t is_hdr_values[2];
10678 is_hdr_values[0] = 0;
10679 is_hdr_values[1] = 1;
10680 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10681 is_hdr_values, 2);
10682
10683 float is_hdr_confidence_range[2];
10684 is_hdr_confidence_range[0] = 0.0;
10685 is_hdr_confidence_range[1] = 1.0;
10686 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10687 is_hdr_confidence_range, 2);
10688
Emilian Peev0a972ef2017-03-16 10:25:53 +000010689 size_t eepromLength = strnlen(
10690 reinterpret_cast<const char *>(
10691 gCamCapability[cameraId]->eeprom_version_info),
10692 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10693 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010694 char easelInfo[] = ",E:N";
10695 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10696 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10697 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010698 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010699 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010700 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010701 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010702 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10703 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10704 }
10705
Thierry Strudel3d639192016-09-09 11:52:26 -070010706 gStaticMetadata[cameraId] = staticInfo.release();
10707 return rc;
10708}
10709
10710/*===========================================================================
10711 * FUNCTION : makeTable
10712 *
10713 * DESCRIPTION: flatten a table of dimensions into an interleaved size list
10714 *
10715 * PARAMETERS :
10716 *   @dimTable, @size, @max_size : input dimension table and its entry count (clamped to max_size)
10717 *   @sizeTable : output flat array of [width, height] pairs
10718 *==========================================================================*/
10719void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10720 size_t max_size, int32_t *sizeTable)
10721{
10722 size_t j = 0;
10723 if (size > max_size) {
10724 size = max_size;
10725 }
10726 for (size_t i = 0; i < size; i++) {
10727 sizeTable[j] = dimTable[i].width;
10728 sizeTable[j+1] = dimTable[i].height;
10729 j+=2;
10730 }
10731}
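// Worked example (a sketch, not part of the original HAL): given a dimTable of
// {4032x3024, 1920x1080} and size = 2, makeTable() fills sizeTable with the flat
// sequence {4032, 3024, 1920, 1080}, the interleaved [width, height] layout used by
// the size tables published in the static metadata above.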
10732
10733/*===========================================================================
10734 * FUNCTION : makeFPSTable
10735 *
10736 * DESCRIPTION: make a table of fps ranges
10737 *
10738 * PARAMETERS :
10739 *   @fpsTable, @size, @max_size, @fpsRangesTable : input FPS range table (clamped to max_size) and output flat [min_fps, max_fps] int32 list
10740 *==========================================================================*/
10741void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10742 size_t max_size, int32_t *fpsRangesTable)
10743{
10744 size_t j = 0;
10745 if (size > max_size) {
10746 size = max_size;
10747 }
10748 for (size_t i = 0; i < size; i++) {
10749 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10750 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10751 j+=2;
10752 }
10753}
10754
10755/*===========================================================================
10756 * FUNCTION : makeOverridesList
10757 *
10758 * DESCRIPTION: make a list of scene mode overrides
10759 *
10760 * PARAMETERS :
10761 *   @overridesTable, @size, @max_size : backend scene-mode override table and its entry count
10762 *   @overridesList, @supported_indexes, @camera_id : output override list and the framework-supported scene-mode indexes
10763 *==========================================================================*/
10764void QCamera3HardwareInterface::makeOverridesList(
10765 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10766 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10767{
10768 /*daemon will give a list of overrides for all scene modes.
10769 However we should send the fwk only the overrides for the scene modes
10770 supported by the framework*/
10771 size_t j = 0;
10772 if (size > max_size) {
10773 size = max_size;
10774 }
10775 size_t focus_count = CAM_FOCUS_MODE_MAX;
10776 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10777 focus_count);
10778 for (size_t i = 0; i < size; i++) {
10779 bool supt = false;
10780 size_t index = supported_indexes[i];
10781 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10782 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10783 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10784 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10785 overridesTable[index].awb_mode);
10786 if (NAME_NOT_FOUND != val) {
10787 overridesList[j+1] = (uint8_t)val;
10788 }
10789 uint8_t focus_override = overridesTable[index].af_mode;
10790 for (size_t k = 0; k < focus_count; k++) {
10791 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10792 supt = true;
10793 break;
10794 }
10795 }
10796 if (supt) {
10797 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10798 focus_override);
10799 if (NAME_NOT_FOUND != val) {
10800 overridesList[j+2] = (uint8_t)val;
10801 }
10802 } else {
10803 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10804 }
10805 j+=3;
10806 }
10807}
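// Layout note (a sketch, not from the original HAL): the generated overridesList holds one
// (AE mode, AWB mode, AF mode) triplet per framework-visible scene mode, matching the
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES layout. On a flash-capable camera a hypothetical
// entry could be {ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
// ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE}.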
10808
10809/*===========================================================================
10810 * FUNCTION : filterJpegSizes
10811 *
10812 * DESCRIPTION: Returns the supported JPEG sizes, keeping only the processed sizes that
10813 * are at least as large as the active array divided by the maximum downscale factor
10814 *
10815 * PARAMETERS : flat [width, height] lists @processedSizes (input) and @jpegSizes (output),
10816 *   their count limits, plus @active_array_size and @downscale_factor used to derive the minimum size
10817 * RETURN : length of jpegSizes array
10818 *==========================================================================*/
10819
10820size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10821 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10822 uint8_t downscale_factor)
10823{
10824 if (0 == downscale_factor) {
10825 downscale_factor = 1;
10826 }
10827
10828 int32_t min_width = active_array_size.width / downscale_factor;
10829 int32_t min_height = active_array_size.height / downscale_factor;
10830 size_t jpegSizesCnt = 0;
10831 if (processedSizesCnt > maxCount) {
10832 processedSizesCnt = maxCount;
10833 }
10834 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10835 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10836 jpegSizes[jpegSizesCnt] = processedSizes[i];
10837 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10838 jpegSizesCnt += 2;
10839 }
10840 }
10841 return jpegSizesCnt;
10842}
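// Worked example (illustrative numbers only): with an active array of 4000x3000 and a
// downscale_factor of 4, the minimum acceptable JPEG size is 1000x750; a processed size of
// 1280x960 is kept while 640x480 is filtered out, and the function returns the number of
// int32 values written (two per surviving size).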
10843
10844/*===========================================================================
10845 * FUNCTION : computeNoiseModelEntryS
10846 *
10847 * DESCRIPTION: function to map a given sensitivity to the S noise
10848 * model parameters in the DNG noise model.
10849 *
10850 * PARAMETERS : sens : the sensor sensitivity
10851 *
10852 * RETURN : S (sensor amplification) noise
10853 *
10854 *==========================================================================*/
10855double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10856 double s = gCamCapability[mCameraId]->gradient_S * sens +
10857 gCamCapability[mCameraId]->offset_S;
10858 return ((s < 0.0) ? 0.0 : s);
10859}
10860
10861/*===========================================================================
10862 * FUNCTION : computeNoiseModelEntryO
10863 *
10864 * DESCRIPTION: function to map a given sensitivity to the O noise
10865 * model parameters in the DNG noise model.
10866 *
10867 * PARAMETERS : sens : the sensor sensitivity
10868 *
10869 * RETURN : O (sensor readout) noise
10870 *
10871 *==========================================================================*/
10872double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10873 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10874 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10875 1.0 : (1.0 * sens / max_analog_sens);
10876 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10877 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10878 return ((o < 0.0) ? 0.0 : o);
10879}
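// Sketch (not part of the original HAL): the S and O values computed above appear to feed the
// per-channel (S, O) pairs reported via ANDROID_SENSOR_NOISE_PROFILE, where the noise model is
// commonly written as
//     variance(x) = S * x + O
// for a normalized pixel value x in [0, 1]. A hypothetical consumer could evaluate the expected
// noise standard deviation like this (assumes <cmath>):
//
//     double noiseStdDev(double x, double S, double O) {
//         return std::sqrt(S * x + O);   // shot-noise term S*x plus read-noise term O
//     }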
10880
10881/*===========================================================================
10882 * FUNCTION : getSensorSensitivity
10883 *
10884 * DESCRIPTION: convert iso_mode to an integer value
10885 *
10886 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10887 *
10888 * RETURN : sensitivity supported by sensor
10889 *
10890 *==========================================================================*/
10891int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10892{
10893 int32_t sensitivity;
10894
10895 switch (iso_mode) {
10896 case CAM_ISO_MODE_100:
10897 sensitivity = 100;
10898 break;
10899 case CAM_ISO_MODE_200:
10900 sensitivity = 200;
10901 break;
10902 case CAM_ISO_MODE_400:
10903 sensitivity = 400;
10904 break;
10905 case CAM_ISO_MODE_800:
10906 sensitivity = 800;
10907 break;
10908 case CAM_ISO_MODE_1600:
10909 sensitivity = 1600;
10910 break;
10911 default:
10912 sensitivity = -1;
10913 break;
10914 }
10915 return sensitivity;
10916}
10917
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010918int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010919 if (gEaselManagerClient == nullptr) {
10920 gEaselManagerClient = EaselManagerClient::create();
10921 if (gEaselManagerClient == nullptr) {
10922 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10923 return -ENODEV;
10924 }
10925 }
10926
10927 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010928 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10929 // to connect to Easel.
10930 bool doNotpowerOnEasel =
10931 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10932
10933 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010934 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10935 return OK;
10936 }
10937
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010938 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010939 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010940 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010941 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010942 return res;
10943 }
10944
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010945 EaselManagerClientOpened = true;
10946
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010947 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010948 if (res != OK) {
10949 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10950 }
10951
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010952 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010953 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010954
10955 // Expose enableZsl key only when HDR+ mode is enabled.
10956 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010957 }
10958
10959 return OK;
10960}
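// Usage note (a sketch, not authoritative): the HDR+/Easel behavior above is gated by the
// system properties read in this function. For local experimentation one might toggle them
// with setprop, typically before the camera provider starts, e.g.:
//
//     adb shell setprop persist.camera.hdrplus.enable 1      # enable HDR+ (bypass-only mode off)
//     adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
//     adb shell setprop camera.hdrplus.donotpoweroneasel 1   # keep Easel off so external HDR+ tests can connect
//
// The property names are taken from the code above; the effect of each flag is as implemented here.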
10961
Thierry Strudel3d639192016-09-09 11:52:26 -070010962/*===========================================================================
10963 * FUNCTION : getCamInfo
10964 *
10965 * DESCRIPTION: query camera capabilities
10966 *
10967 * PARAMETERS :
10968 * @cameraId : camera Id
10969 * @info : camera info struct to be filled in with camera capabilities
10970 *
10971 * RETURN : int type of status
10972 * NO_ERROR -- success
10973 * none-zero failure code
10974 *==========================================================================*/
10975int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10976 struct camera_info *info)
10977{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010978 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010979 int rc = 0;
10980
10981 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010982
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010983 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010984 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010985 rc = initHdrPlusClientLocked();
10986 if (rc != OK) {
10987 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10988 pthread_mutex_unlock(&gCamLock);
10989 return rc;
10990 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010991 }
10992
Thierry Strudel3d639192016-09-09 11:52:26 -070010993 if (NULL == gCamCapability[cameraId]) {
10994 rc = initCapabilities(cameraId);
10995 if (rc < 0) {
10996 pthread_mutex_unlock(&gCamLock);
10997 return rc;
10998 }
10999 }
11000
11001 if (NULL == gStaticMetadata[cameraId]) {
11002 rc = initStaticMetadata(cameraId);
11003 if (rc < 0) {
11004 pthread_mutex_unlock(&gCamLock);
11005 return rc;
11006 }
11007 }
11008
11009 switch(gCamCapability[cameraId]->position) {
11010 case CAM_POSITION_BACK:
11011 case CAM_POSITION_BACK_AUX:
11012 info->facing = CAMERA_FACING_BACK;
11013 break;
11014
11015 case CAM_POSITION_FRONT:
11016 case CAM_POSITION_FRONT_AUX:
11017 info->facing = CAMERA_FACING_FRONT;
11018 break;
11019
11020 default:
11021 LOGE("Unknown position type %d for camera id:%d",
11022 gCamCapability[cameraId]->position, cameraId);
11023 rc = -1;
11024 break;
11025 }
11026
11027
11028 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011029#ifndef USE_HAL_3_3
11030 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11031#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011032 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011033#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011034 info->static_camera_characteristics = gStaticMetadata[cameraId];
11035
11036 //For now assume both cameras can operate independently.
11037 info->conflicting_devices = NULL;
11038 info->conflicting_devices_length = 0;
11039
11040 //resource cost is 100 * MIN(1.0, m/M),
11041 //where m is throughput requirement with maximum stream configuration
11042 //and M is CPP maximum throughput.
11043 float max_fps = 0.0;
11044 for (uint32_t i = 0;
11045 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11046 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11047 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11048 }
11049 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11050 gCamCapability[cameraId]->active_array_size.width *
11051 gCamCapability[cameraId]->active_array_size.height * max_fps /
11052 gCamCapability[cameraId]->max_pixel_bandwidth;
11053 info->resource_cost = 100 * MIN(1.0, ratio);
11054 LOGI("camera %d resource cost is %d", cameraId,
11055 info->resource_cost);
11056
11057 pthread_mutex_unlock(&gCamLock);
11058 return rc;
11059}
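// Worked example for the resource cost above (illustrative numbers, not from any particular
// sensor): assuming MAX_PROCESSED_STREAMS = 2, a 4000x3000 active array, max_fps = 30 and a
// CPP max_pixel_bandwidth of 1.2e9 pixels/s,
//     ratio = 2 * 4000 * 3000 * 30 / 1.2e9 = 0.6, so resource_cost = 100 * MIN(1.0, 0.6) = 60.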
11060
11061/*===========================================================================
11062 * FUNCTION : translateCapabilityToMetadata
11063 *
11064 * DESCRIPTION: translate the capability into camera_metadata_t
11065 *
11066 * PARAMETERS : type of the request
11067 *
11068 *
11069 * RETURN : success: camera_metadata_t*
11070 * failure: NULL
11071 *
11072 *==========================================================================*/
11073camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11074{
11075 if (mDefaultMetadata[type] != NULL) {
11076 return mDefaultMetadata[type];
11077 }
11078 //first time we are handling this request
11079 //fill up the metadata structure using the wrapper class
11080 CameraMetadata settings;
11081 //translate from cam_capability_t to camera_metadata_tag_t
11082 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11083 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11084 int32_t defaultRequestID = 0;
11085 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11086
11087 /* OIS disable */
11088 char ois_prop[PROPERTY_VALUE_MAX];
11089 memset(ois_prop, 0, sizeof(ois_prop));
11090 property_get("persist.camera.ois.disable", ois_prop, "0");
11091 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11092
11093 /* Force video to use OIS */
11094 char videoOisProp[PROPERTY_VALUE_MAX];
11095 memset(videoOisProp, 0, sizeof(videoOisProp));
11096 property_get("persist.camera.ois.video", videoOisProp, "1");
11097 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011098
11099 // Hybrid AE enable/disable
11100 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11101 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11102 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011103 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011104
Thierry Strudel3d639192016-09-09 11:52:26 -070011105 uint8_t controlIntent = 0;
11106 uint8_t focusMode;
11107 uint8_t vsMode;
11108 uint8_t optStabMode;
11109 uint8_t cacMode;
11110 uint8_t edge_mode;
11111 uint8_t noise_red_mode;
11112 uint8_t tonemap_mode;
11113 bool highQualityModeEntryAvailable = FALSE;
11114 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011115 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011116 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11117 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011118 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011119 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011120 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011121
Thierry Strudel3d639192016-09-09 11:52:26 -070011122 switch (type) {
11123 case CAMERA3_TEMPLATE_PREVIEW:
11124 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11125 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11126 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11127 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11128 edge_mode = ANDROID_EDGE_MODE_FAST;
11129 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11130 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11131 break;
11132 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11133 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11134 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11135 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11136 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11137 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11138 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11139 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11140 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11141 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11142 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11143 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11144 highQualityModeEntryAvailable = TRUE;
11145 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11146 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11147 fastModeEntryAvailable = TRUE;
11148 }
11149 }
11150 if (highQualityModeEntryAvailable) {
11151 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11152 } else if (fastModeEntryAvailable) {
11153 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11154 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011155 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11156 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11157 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011158 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011159 break;
11160 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11161 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11162 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11163 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011164 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11165 edge_mode = ANDROID_EDGE_MODE_FAST;
11166 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11167 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11168 if (forceVideoOis)
11169 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11170 break;
11171 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11172 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11173 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11174 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011175 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11176 edge_mode = ANDROID_EDGE_MODE_FAST;
11177 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11178 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11179 if (forceVideoOis)
11180 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11181 break;
11182 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11183 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11184 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11185 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11186 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11187 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11188 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11189 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11190 break;
11191 case CAMERA3_TEMPLATE_MANUAL:
11192 edge_mode = ANDROID_EDGE_MODE_FAST;
11193 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11194 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11195 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11196 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11197 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11198 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11199 break;
11200 default:
11201 edge_mode = ANDROID_EDGE_MODE_FAST;
11202 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11203 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11204 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11205 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11206 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11207 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11208 break;
11209 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011210 // Set CAC to OFF if the underlying device doesn't support it
11211 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11212 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11213 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011214 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11215 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11216 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11217 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11218 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11219 }
11220 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011221 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011222 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011223
11224 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11225 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11226 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11227 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11228 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11229 || ois_disable)
11230 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11231 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011232 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011233
11234 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11235 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11236
11237 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11238 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11239
11240 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11241 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11242
11243 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11244 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11245
11246 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11247 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11248
11249 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11250 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11251
11252 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11253 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11254
11255 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11256 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11257
11258 /*flash*/
11259 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11260 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11261
11262 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11263 settings.update(ANDROID_FLASH_FIRING_POWER,
11264 &flashFiringLevel, 1);
11265
11266 /* lens */
11267 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11268 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11269
11270 if (gCamCapability[mCameraId]->filter_densities_count) {
11271 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11272 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11273 gCamCapability[mCameraId]->filter_densities_count);
11274 }
11275
11276 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11277 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11278
Thierry Strudel3d639192016-09-09 11:52:26 -070011279 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11280 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11281
11282 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11283 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11284
11285 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11286 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11287
11288 /* face detection (default to OFF) */
11289 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11290 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11291
Thierry Strudel54dc9782017-02-15 12:12:10 -080011292 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11293 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011294
11295 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11296 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11297
11298 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11299 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11300
Thierry Strudel3d639192016-09-09 11:52:26 -070011301
11302 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11303 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11304
11305 /* Exposure time (default to the minimum supported exposure time) */
11306 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11307 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11308
11309 /* frame duration */
11310 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11311 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11312
11313 /* sensitivity */
11314 static const int32_t default_sensitivity = 100;
11315 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011316#ifndef USE_HAL_3_3
11317 static const int32_t default_isp_sensitivity =
11318 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11319 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11320#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011321
11322 /*edge mode*/
11323 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11324
11325 /*noise reduction mode*/
11326 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11327
11328 /*color correction mode*/
11329 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11330 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11331
11332 /*transform matrix mode*/
11333 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11334
11335 int32_t scaler_crop_region[4];
11336 scaler_crop_region[0] = 0;
11337 scaler_crop_region[1] = 0;
11338 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11339 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11340 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11341
11342 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11343 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11344
11345 /*focus distance*/
11346 float focus_distance = 0.0;
11347 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11348
11349 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011350 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 float max_range = 0.0;
11352 float max_fixed_fps = 0.0;
11353 int32_t fps_range[2] = {0, 0};
11354 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11355 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011356 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11357 TEMPLATE_MAX_PREVIEW_FPS) {
11358 continue;
11359 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011360 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11361 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11362 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11363 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11364 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11365 if (range > max_range) {
11366 fps_range[0] =
11367 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11368 fps_range[1] =
11369 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11370 max_range = range;
11371 }
11372 } else {
11373 if (range < 0.01 && max_fixed_fps <
11374 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11375 fps_range[0] =
11376 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11377 fps_range[1] =
11378 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11379 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11380 }
11381 }
11382 }
11383 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
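 // Worked example (illustrative ranges only): given FPS ranges {15,30}, {30,30} and {30,60},
 // the {30,60} range is skipped when TEMPLATE_MAX_PREVIEW_FPS is 30; preview/still/ZSL
 // templates then pick {15,30} (largest spread), while video-style templates pick {30,30}
 // (highest fixed-fps range).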
11384
11385 /*precapture trigger*/
11386 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11387 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11388
11389 /*af trigger*/
11390 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11391 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11392
11393 /* ae & af regions */
11394 int32_t active_region[] = {
11395 gCamCapability[mCameraId]->active_array_size.left,
11396 gCamCapability[mCameraId]->active_array_size.top,
11397 gCamCapability[mCameraId]->active_array_size.left +
11398 gCamCapability[mCameraId]->active_array_size.width,
11399 gCamCapability[mCameraId]->active_array_size.top +
11400 gCamCapability[mCameraId]->active_array_size.height,
11401 0};
11402 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11403 sizeof(active_region) / sizeof(active_region[0]));
11404 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11405 sizeof(active_region) / sizeof(active_region[0]));
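 // Layout note (illustrative): each metering region is (xmin, ymin, xmax, ymax, weight); for a
 // hypothetical 4000x3000 active array starting at (0, 0) the default becomes
 // {0, 0, 4000, 3000, 0}, and the trailing weight of 0 marks the region as effectively unset.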
11406
11407 /* black level lock */
11408 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11409 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11410
Thierry Strudel3d639192016-09-09 11:52:26 -070011411 //special defaults for manual template
11412 if (type == CAMERA3_TEMPLATE_MANUAL) {
11413 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11414 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11415
11416 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11417 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11418
11419 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11420 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11421
11422 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11423 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11424
11425 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11426 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11427
11428 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11429 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11430 }
11431
11432
    /* TNR
     * This is where we decide for which templates TNR will be enabled.
     * TNR is turned on if either the preview or the video stream requires it.
     * This is not to be confused with per-stream linking; that decision is
     * still made per-session and is handled as part of stream configuration.
     */
11439 uint8_t tnr_enable = 0;
11440
11441 if (m_bTnrPreview || m_bTnrVideo) {
11442
11443 switch (type) {
11444 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11445 tnr_enable = 1;
11446 break;
11447
11448 default:
11449 tnr_enable = 0;
11450 break;
11451 }
11452
11453 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11454 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11455 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11456
11457 LOGD("TNR:%d with process plate %d for template:%d",
11458 tnr_enable, tnr_process_type, type);
11459 }
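    /* Note (descriptive): with the switch above, only CAMERA3_TEMPLATE_VIDEO_RECORD ends up
     * with tnr_enable = 1; every other template keeps tnr_enable = 0, which also leaves the
     * CDS default below untouched since CDS is only forced off when TNR is enabled. */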
11460
11461 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011462 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011463 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11464
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011465 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011466 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11467
Shuzhen Wang920ea402017-05-03 08:49:39 -070011468 uint8_t related_camera_id = mCameraId;
11469 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011470
11471 /* CDS default */
11472 char prop[PROPERTY_VALUE_MAX];
11473 memset(prop, 0, sizeof(prop));
11474 property_get("persist.camera.CDS", prop, "Auto");
11475 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11476 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11477 if (CAM_CDS_MODE_MAX == cds_mode) {
11478 cds_mode = CAM_CDS_MODE_AUTO;
11479 }
11480
    /* Disable CDS in templates that have TNR enabled */
11482 if (tnr_enable)
11483 cds_mode = CAM_CDS_MODE_OFF;
11484
11485 int32_t mode = cds_mode;
11486 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011487
Thierry Strudel269c81a2016-10-12 12:13:59 -070011488 /* Manual Convergence AEC Speed is disabled by default*/
11489 float default_aec_speed = 0;
11490 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11491
11492 /* Manual Convergence AWB Speed is disabled by default*/
11493 float default_awb_speed = 0;
11494 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11495
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011496 // Set instant AEC to normal convergence by default
11497 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11498 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11499
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011500 if (gExposeEnableZslKey) {
11501 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011502 int32_t postview = 0;
11503 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011504 int32_t continuousZslCapture = 0;
11505 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011506 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11507 // hybrid ae is enabled for 3rd party app HDR+.
11508 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11509 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11510 hybrid_ae = 1;
11511 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011512 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011513 /* hybrid ae */
11514 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011515
Thierry Strudel3d639192016-09-09 11:52:26 -070011516 mDefaultMetadata[type] = settings.release();
11517
11518 return mDefaultMetadata[type];
11519}
11520
11521/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011522 * FUNCTION : getExpectedFrameDuration
11523 *
11524 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11525 * duration
11526 *
11527 * PARAMETERS :
11528 * @request : request settings
11529 * @frameDuration : The maximum frame duration in nanoseconds
11530 *
11531 * RETURN : None
11532 *==========================================================================*/
11533void QCamera3HardwareInterface::getExpectedFrameDuration(
11534 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11535 if (nullptr == frameDuration) {
11536 return;
11537 }
11538
11539 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11540 find_camera_metadata_ro_entry(request,
11541 ANDROID_SENSOR_EXPOSURE_TIME,
11542 &e);
11543 if (e.count > 0) {
11544 *frameDuration = e.data.i64[0];
11545 }
11546 find_camera_metadata_ro_entry(request,
11547 ANDROID_SENSOR_FRAME_DURATION,
11548 &e);
11549 if (e.count > 0) {
11550 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11551 }
11552}
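/* Usage sketch for getExpectedFrameDuration (illustrative only, hypothetical values):
 *   nsecs_t duration = kDefaultExpectedDuration;
 *   getExpectedFrameDuration(request->settings, &duration);
 *   // With ANDROID_SENSOR_EXPOSURE_TIME = 33000000 ns and
 *   // ANDROID_SENSOR_FRAME_DURATION = 50000000 ns, duration becomes 50000000 ns,
 *   // i.e. the larger of the two tags.
 */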
11553
11554/*===========================================================================
11555 * FUNCTION : calculateMaxExpectedDuration
11556 *
11557 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11558 * current camera settings.
11559 *
11560 * PARAMETERS :
11561 * @request : request settings
11562 *
11563 * RETURN : Expected frame duration in nanoseconds.
11564 *==========================================================================*/
11565nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11566 const camera_metadata_t *request) {
11567 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11568 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11569 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11570 if (e.count == 0) {
11571 return maxExpectedDuration;
11572 }
11573
11574 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11575 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11576 }
11577
11578 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11579 return maxExpectedDuration;
11580 }
11581
11582 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11583 if (e.count == 0) {
11584 return maxExpectedDuration;
11585 }
11586
11587 switch (e.data.u8[0]) {
11588 case ANDROID_CONTROL_AE_MODE_OFF:
11589 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11590 break;
11591 default:
11592 find_camera_metadata_ro_entry(request,
11593 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11594 &e);
11595 if (e.count > 1) {
                // ANDROID_CONTROL_AE_TARGET_FPS_RANGE is int32; bound the duration by the minimum fps.
                maxExpectedDuration = 1e9 / e.data.i32[0];
11597 }
11598 break;
11599 }
11600
11601 return maxExpectedDuration;
11602}
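/* Decision summary for calculateMaxExpectedDuration (descriptive): with CONTROL_MODE OFF the
 * duration comes from the exposure/frame-duration tags; with CONTROL_MODE AUTO it depends on
 * AE_MODE: OFF again uses those tags, otherwise the duration is derived from the AE target
 * fps range (e.g. a hypothetical [15, 30] fps range gives roughly 66.7 ms). Any other control
 * mode keeps the default duration. */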
11603
11604/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011605 * FUNCTION : setFrameParameters
11606 *
11607 * DESCRIPTION: set parameters per frame as requested in the metadata from
11608 * framework
11609 *
11610 * PARAMETERS :
11611 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011612 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011613 * @blob_request: Whether this request is a blob request or not
11614 *
11615 * RETURN : success: NO_ERROR
11616 * failure:
11617 *==========================================================================*/
11618int QCamera3HardwareInterface::setFrameParameters(
11619 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011620 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011621 int blob_request,
11622 uint32_t snapshotStreamId)
11623{
11624 /*translate from camera_metadata_t type to parm_type_t*/
11625 int rc = 0;
11626 int32_t hal_version = CAM_HAL_V3;
11627
11628 clear_metadata_buffer(mParameters);
11629 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11630 LOGE("Failed to set hal version in the parameters");
11631 return BAD_VALUE;
11632 }
11633
11634 /*we need to update the frame number in the parameters*/
11635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11636 request->frame_number)) {
11637 LOGE("Failed to set the frame number in the parameters");
11638 return BAD_VALUE;
11639 }
11640
11641 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011643 LOGE("Failed to set stream type mask in the parameters");
11644 return BAD_VALUE;
11645 }
11646
11647 if (mUpdateDebugLevel) {
11648 uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. Setting
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL makes the backend re-read the debug property. */
11651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11652 dummyDebugLevel)) {
11653 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11654 return BAD_VALUE;
11655 }
11656 mUpdateDebugLevel = false;
11657 }
11658
11659 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011660 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011661 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11662 if (blob_request)
11663 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11664 }
11665
11666 return rc;
11667}
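/* Flow summary for setFrameParameters (descriptive): the batch is cleared, then the HAL
 * version, frame number and requested stream ID mask are added, the debug level is optionally
 * refreshed, and finally the framework settings are translated via translateToHalMetadata,
 * with a copy kept in mPrevParameters for blob requests. */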
11668
11669/*===========================================================================
11670 * FUNCTION : setReprocParameters
11671 *
11672 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11673 * return it.
11674 *
11675 * PARAMETERS :
11676 * @request : request that needs to be serviced
11677 *
11678 * RETURN : success: NO_ERROR
11679 * failure:
11680 *==========================================================================*/
11681int32_t QCamera3HardwareInterface::setReprocParameters(
11682 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11683 uint32_t snapshotStreamId)
11684{
11685 /*translate from camera_metadata_t type to parm_type_t*/
11686 int rc = 0;
11687
11688 if (NULL == request->settings){
11689 LOGE("Reprocess settings cannot be NULL");
11690 return BAD_VALUE;
11691 }
11692
11693 if (NULL == reprocParam) {
11694 LOGE("Invalid reprocessing metadata buffer");
11695 return BAD_VALUE;
11696 }
11697 clear_metadata_buffer(reprocParam);
11698
11699 /*we need to update the frame number in the parameters*/
11700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11701 request->frame_number)) {
11702 LOGE("Failed to set the frame number in the parameters");
11703 return BAD_VALUE;
11704 }
11705
11706 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11707 if (rc < 0) {
11708 LOGE("Failed to translate reproc request");
11709 return rc;
11710 }
11711
11712 CameraMetadata frame_settings;
11713 frame_settings = request->settings;
11714 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11715 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11716 int32_t *crop_count =
11717 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11718 int32_t *crop_data =
11719 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11720 int32_t *roi_map =
11721 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11722 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11723 cam_crop_data_t crop_meta;
11724 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11725 crop_meta.num_of_streams = 1;
11726 crop_meta.crop_info[0].crop.left = crop_data[0];
11727 crop_meta.crop_info[0].crop.top = crop_data[1];
11728 crop_meta.crop_info[0].crop.width = crop_data[2];
11729 crop_meta.crop_info[0].crop.height = crop_data[3];
11730
11731 crop_meta.crop_info[0].roi_map.left =
11732 roi_map[0];
11733 crop_meta.crop_info[0].roi_map.top =
11734 roi_map[1];
11735 crop_meta.crop_info[0].roi_map.width =
11736 roi_map[2];
11737 crop_meta.crop_info[0].roi_map.height =
11738 roi_map[3];
11739
11740 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11741 rc = BAD_VALUE;
11742 }
11743 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11744 request->input_buffer->stream,
11745 crop_meta.crop_info[0].crop.left,
11746 crop_meta.crop_info[0].crop.top,
11747 crop_meta.crop_info[0].crop.width,
11748 crop_meta.crop_info[0].crop.height);
11749 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11750 request->input_buffer->stream,
11751 crop_meta.crop_info[0].roi_map.left,
11752 crop_meta.crop_info[0].roi_map.top,
11753 crop_meta.crop_info[0].roi_map.width,
11754 crop_meta.crop_info[0].roi_map.height);
11755 } else {
11756 LOGE("Invalid reprocess crop count %d!", *crop_count);
11757 }
11758 } else {
11759 LOGE("No crop data from matching output stream");
11760 }
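    /* Illustrative example (hypothetical values): crop_data = {0, 0, 4000, 3000} with
     * roi_map = {0, 0, 4000, 3000} describes an uncropped reprocess input; only one crop
     * entry is consumed here since num_of_streams is forced to 1. */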
11761
11762 /* These settings are not needed for regular requests so handle them specially for
11763 reprocess requests; information needed for EXIF tags */
11764 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11765 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11766 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11767 if (NAME_NOT_FOUND != val) {
11768 uint32_t flashMode = (uint32_t)val;
11769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11770 rc = BAD_VALUE;
11771 }
11772 } else {
11773 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11774 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11775 }
11776 } else {
11777 LOGH("No flash mode in reprocess settings");
11778 }
11779
11780 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11781 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11783 rc = BAD_VALUE;
11784 }
11785 } else {
11786 LOGH("No flash state in reprocess settings");
11787 }
11788
11789 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11790 uint8_t *reprocessFlags =
11791 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11793 *reprocessFlags)) {
11794 rc = BAD_VALUE;
11795 }
11796 }
11797
Thierry Strudel54dc9782017-02-15 12:12:10 -080011798 // Add exif debug data to internal metadata
11799 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11800 mm_jpeg_debug_exif_params_t *debug_params =
11801 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11802 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11803 // AE
11804 if (debug_params->ae_debug_params_valid == TRUE) {
11805 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11806 debug_params->ae_debug_params);
11807 }
11808 // AWB
11809 if (debug_params->awb_debug_params_valid == TRUE) {
11810 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11811 debug_params->awb_debug_params);
11812 }
11813 // AF
11814 if (debug_params->af_debug_params_valid == TRUE) {
11815 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11816 debug_params->af_debug_params);
11817 }
11818 // ASD
11819 if (debug_params->asd_debug_params_valid == TRUE) {
11820 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11821 debug_params->asd_debug_params);
11822 }
11823 // Stats
11824 if (debug_params->stats_debug_params_valid == TRUE) {
11825 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11826 debug_params->stats_debug_params);
11827 }
11828 // BE Stats
11829 if (debug_params->bestats_debug_params_valid == TRUE) {
11830 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11831 debug_params->bestats_debug_params);
11832 }
11833 // BHIST
11834 if (debug_params->bhist_debug_params_valid == TRUE) {
11835 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11836 debug_params->bhist_debug_params);
11837 }
11838 // 3A Tuning
11839 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11840 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11841 debug_params->q3a_tuning_debug_params);
11842 }
11843 }
11844
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011845 // Add metadata which reprocess needs
11846 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11847 cam_reprocess_info_t *repro_info =
11848 (cam_reprocess_info_t *)frame_settings.find
11849 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011850 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011851 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011852 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011853 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011854 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011855 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011856 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011857 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011858 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011859 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011860 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011861 repro_info->pipeline_flip);
11862 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11863 repro_info->af_roi);
11864 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11865 repro_info->dyn_mask);
        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
           CAM_INTF_PARM_ROTATION has already been added in translateToHalMetadata
           and the HAL needs to keep that new rotation metadata. Otherwise, the old
           rotation info saved in the vendor tag is used. */
11871 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11872 CAM_INTF_PARM_ROTATION, reprocParam) {
11873 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11874 } else {
11875 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011876 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011877 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011878 }
11879
    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
       roi.width and roi.height are the final JPEG size.
       For now, the HAL only checks this for reprocess requests. */
11884 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11885 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11886 uint8_t *enable =
11887 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11888 if (*enable == TRUE) {
11889 int32_t *crop_data =
11890 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11891 cam_stream_crop_info_t crop_meta;
11892 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11893 crop_meta.stream_id = 0;
11894 crop_meta.crop.left = crop_data[0];
11895 crop_meta.crop.top = crop_data[1];
11896 crop_meta.crop.width = crop_data[2];
11897 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011898 // The JPEG crop roi should match cpp output size
11899 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11900 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11901 crop_meta.roi_map.left = 0;
11902 crop_meta.roi_map.top = 0;
11903 crop_meta.roi_map.width = cpp_crop->crop.width;
11904 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011905 }
11906 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11907 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011908 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011909 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011910 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11911 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011912 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011913 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11914
11915 // Add JPEG scale information
11916 cam_dimension_t scale_dim;
11917 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11918 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11919 int32_t *roi =
11920 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11921 scale_dim.width = roi[2];
11922 scale_dim.height = roi[3];
11923 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11924 scale_dim);
11925 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11926 scale_dim.width, scale_dim.height, mCameraId);
11927 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011928 }
11929 }
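    /* Illustrative example (hypothetical values): a QCAMERA3_JPEG_ENCODE_CROP_RECT of
     * {0, 0, 3000, 2000} combined with a QCAMERA3_JPEG_ENCODE_CROP_ROI of {0, 0, 1920, 1080}
     * crops the CPP output to 3000x2000 and scales it to a final 1920x1080 JPEG. */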
11930
11931 return rc;
11932}
11933
11934/*===========================================================================
11935 * FUNCTION : saveRequestSettings
11936 *
11937 * DESCRIPTION: Add any settings that might have changed to the request settings
11938 * and save the settings to be applied on the frame
11939 *
11940 * PARAMETERS :
11941 * @jpegMetadata : the extracted and/or modified jpeg metadata
11942 * @request : request with initial settings
11943 *
11944 * RETURN :
11945 * camera_metadata_t* : pointer to the saved request settings
11946 *==========================================================================*/
11947camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11948 const CameraMetadata &jpegMetadata,
11949 camera3_capture_request_t *request)
11950{
11951 camera_metadata_t *resultMetadata;
11952 CameraMetadata camMetadata;
11953 camMetadata = request->settings;
11954
11955 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11956 int32_t thumbnail_size[2];
11957 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11958 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11959 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11960 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11961 }
11962
11963 if (request->input_buffer != NULL) {
11964 uint8_t reprocessFlags = 1;
11965 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11966 (uint8_t*)&reprocessFlags,
11967 sizeof(reprocessFlags));
11968 }
11969
11970 resultMetadata = camMetadata.release();
11971 return resultMetadata;
11972}
11973
11974/*===========================================================================
11975 * FUNCTION : setHalFpsRange
11976 *
11977 * DESCRIPTION: set FPS range parameter
11978 *
11979 *
11980 * PARAMETERS :
11981 * @settings : Metadata from framework
11982 * @hal_metadata: Metadata buffer
11983 *
11984 *
11985 * RETURN : success: NO_ERROR
11986 * failure:
11987 *==========================================================================*/
11988int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11989 metadata_buffer_t *hal_metadata)
11990{
11991 int32_t rc = NO_ERROR;
11992 cam_fps_range_t fps_range;
11993 fps_range.min_fps = (float)
11994 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11995 fps_range.max_fps = (float)
11996 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11997 fps_range.video_min_fps = fps_range.min_fps;
11998 fps_range.video_max_fps = fps_range.max_fps;
11999
12000 LOGD("aeTargetFpsRange fps: [%f %f]",
12001 fps_range.min_fps, fps_range.max_fps);
12002 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12003 * follows:
12004 * ---------------------------------------------------------------|
12005 * Video stream is absent in configure_streams |
12006 * (Camcorder preview before the first video record |
12007 * ---------------------------------------------------------------|
12008 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12009 * | | | vid_min/max_fps|
12010 * ---------------------------------------------------------------|
12011 * NO | [ 30, 240] | 240 | [240, 240] |
12012 * |-------------|-------------|----------------|
12013 * | [240, 240] | 240 | [240, 240] |
12014 * ---------------------------------------------------------------|
12015 * Video stream is present in configure_streams |
12016 * ---------------------------------------------------------------|
12017 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12018 * | | | vid_min/max_fps|
12019 * ---------------------------------------------------------------|
12020 * NO | [ 30, 240] | 240 | [240, 240] |
12021 * (camcorder prev |-------------|-------------|----------------|
12022 * after video rec | [240, 240] | 240 | [240, 240] |
12023 * is stopped) | | | |
12024 * ---------------------------------------------------------------|
12025 * YES | [ 30, 240] | 240 | [240, 240] |
12026 * |-------------|-------------|----------------|
12027 * | [240, 240] | 240 | [240, 240] |
12028 * ---------------------------------------------------------------|
12029 * When Video stream is absent in configure_streams,
12030 * preview fps = sensor_fps / batchsize
12031 * Eg: for 240fps at batchSize 4, preview = 60fps
12032 * for 120fps at batchSize 4, preview = 30fps
12033 *
12034 * When video stream is present in configure_streams, preview fps is as per
12035 * the ratio of preview buffers to video buffers requested in process
12036 * capture request
12037 */
12038 mBatchSize = 0;
12039 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12040 fps_range.min_fps = fps_range.video_max_fps;
12041 fps_range.video_min_fps = fps_range.video_max_fps;
12042 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12043 fps_range.max_fps);
12044 if (NAME_NOT_FOUND != val) {
12045 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12047 return BAD_VALUE;
12048 }
12049
12050 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12051 /* If batchmode is currently in progress and the fps changes,
12052 * set the flag to restart the sensor */
12053 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12054 (mHFRVideoFps != fps_range.max_fps)) {
12055 mNeedSensorRestart = true;
12056 }
12057 mHFRVideoFps = fps_range.max_fps;
12058 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12059 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12060 mBatchSize = MAX_HFR_BATCH_SIZE;
12061 }
12062 }
12063 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12064
12065 }
12066 } else {
12067 /* HFR mode is session param in backend/ISP. This should be reset when
12068 * in non-HFR mode */
12069 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12071 return BAD_VALUE;
12072 }
12073 }
12074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12075 return BAD_VALUE;
12076 }
12077 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12078 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12079 return rc;
12080}
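/* HFR note for setHalFpsRange (descriptive): in CONSTRAINED_HIGH_SPEED mode an aeTargetFpsRange
 * of [30, 240] is promoted to a fixed sensor range of [240, 240] as per the table above, the
 * matching HFR mode is looked up, and mBatchSize is derived from mHFRVideoFps divided by
 * PREVIEW_FPS_FOR_HFR, capped at MAX_HFR_BATCH_SIZE. */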
12081
12082/*===========================================================================
12083 * FUNCTION : translateToHalMetadata
12084 *
12085 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12086 *
12087 *
12088 * PARAMETERS :
12089 * @request : request sent from framework
12090 *
12091 *
12092 * RETURN : success: NO_ERROR
12093 * failure:
12094 *==========================================================================*/
12095int QCamera3HardwareInterface::translateToHalMetadata
12096 (const camera3_capture_request_t *request,
12097 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012098 uint32_t snapshotStreamId) {
12099 if (request == nullptr || hal_metadata == nullptr) {
12100 return BAD_VALUE;
12101 }
12102
12103 int64_t minFrameDuration = getMinFrameDuration(request);
12104
12105 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12106 minFrameDuration);
12107}
12108
12109int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12110 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12111 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12112
Thierry Strudel3d639192016-09-09 11:52:26 -070012113 int rc = 0;
12114 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012115 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012116
12117 /* Do not change the order of the following list unless you know what you are
12118 * doing.
12119 * The order is laid out in such a way that parameters in the front of the table
12120 * may be used to override the parameters later in the table. Examples are:
12121 * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
12125 */
12126 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12127 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12129 rc = BAD_VALUE;
12130 }
12131 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12132 if (rc != NO_ERROR) {
12133 LOGE("extractSceneMode failed");
12134 }
12135 }
12136
12137 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12138 uint8_t fwk_aeMode =
12139 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12140 uint8_t aeMode;
12141 int32_t redeye;
12142
12143 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12144 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012145 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12146 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012147 } else {
12148 aeMode = CAM_AE_MODE_ON;
12149 }
12150 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12151 redeye = 1;
12152 } else {
12153 redeye = 0;
12154 }
12155
12156 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12157 fwk_aeMode);
12158 if (NAME_NOT_FOUND != val) {
12159 int32_t flashMode = (int32_t)val;
12160 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12161 }
12162
12163 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12165 rc = BAD_VALUE;
12166 }
12167 }
12168
12169 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12170 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12171 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12172 fwk_whiteLevel);
12173 if (NAME_NOT_FOUND != val) {
12174 uint8_t whiteLevel = (uint8_t)val;
12175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
12179 }
12180
12181 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12182 uint8_t fwk_cacMode =
12183 frame_settings.find(
12184 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12185 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12186 fwk_cacMode);
12187 if (NAME_NOT_FOUND != val) {
12188 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12189 bool entryAvailable = FALSE;
12190 // Check whether Frameworks set CAC mode is supported in device or not
12191 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12192 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12193 entryAvailable = TRUE;
12194 break;
12195 }
12196 }
12197 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
            // If the entry is not found, set the device-supported mode instead of the framework mode, i.e.:
            // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast via the ISP
            // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
12201 if (entryAvailable == FALSE) {
12202 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12203 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12204 } else {
12205 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                    // High is not supported, so set FAST; the spec says the underlying
                    // device implementation can be the same for both modes.
12208 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12209 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12210 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12211 // in order to avoid the fps drop due to high quality
12212 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12213 } else {
12214 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12215 }
12216 }
12217 }
12218 LOGD("Final cacMode is %d", cacMode);
12219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12220 rc = BAD_VALUE;
12221 }
12222 } else {
12223 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12224 }
12225 }
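    /* Fallback example (descriptive): if the framework requests HIGH_QUALITY but the device
     * only advertises FAST, FAST is used; if FAST is requested but not advertised, OFF is
     * used to avoid an fps drop. */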
12226
Jason Lee84ae9972017-02-24 13:24:24 -080012227 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012228 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012229 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012230 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012231 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12232 fwk_focusMode);
12233 if (NAME_NOT_FOUND != val) {
12234 uint8_t focusMode = (uint8_t)val;
12235 LOGD("set focus mode %d", focusMode);
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12237 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12238 rc = BAD_VALUE;
12239 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012240 }
12241 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012242 } else {
12243 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12244 LOGE("Focus forced to infinity %d", focusMode);
12245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12246 rc = BAD_VALUE;
12247 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012248 }
12249
Jason Lee84ae9972017-02-24 13:24:24 -080012250 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12251 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012252 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12254 focalDistance)) {
12255 rc = BAD_VALUE;
12256 }
12257 }
12258
12259 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12260 uint8_t fwk_antibandingMode =
12261 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12262 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12263 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12264 if (NAME_NOT_FOUND != val) {
12265 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012266 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12267 if (m60HzZone) {
12268 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12269 } else {
12270 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12271 }
12272 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12274 hal_antibandingMode)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278 }
12279
12280 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12281 int32_t expCompensation = frame_settings.find(
12282 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12283 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12284 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12285 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12286 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012287 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12289 expCompensation)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293
12294 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12295 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12297 rc = BAD_VALUE;
12298 }
12299 }
12300 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12301 rc = setHalFpsRange(frame_settings, hal_metadata);
12302 if (rc != NO_ERROR) {
12303 LOGE("setHalFpsRange failed");
12304 }
12305 }
12306
12307 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12308 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12310 rc = BAD_VALUE;
12311 }
12312 }
12313
12314 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12315 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12316 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12317 fwk_effectMode);
12318 if (NAME_NOT_FOUND != val) {
12319 uint8_t effectMode = (uint8_t)val;
12320 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324 }
12325
12326 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12327 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12329 colorCorrectMode)) {
12330 rc = BAD_VALUE;
12331 }
12332 }
12333
12334 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12335 cam_color_correct_gains_t colorCorrectGains;
12336 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12337 colorCorrectGains.gains[i] =
12338 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12339 }
12340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12341 colorCorrectGains)) {
12342 rc = BAD_VALUE;
12343 }
12344 }
12345
12346 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12347 cam_color_correct_matrix_t colorCorrectTransform;
12348 cam_rational_type_t transform_elem;
12349 size_t num = 0;
12350 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12351 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12352 transform_elem.numerator =
12353 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12354 transform_elem.denominator =
12355 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12356 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12357 num++;
12358 }
12359 }
12360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12361 colorCorrectTransform)) {
12362 rc = BAD_VALUE;
12363 }
12364 }
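    /* Layout note (descriptive): the 3x3 ANDROID_COLOR_CORRECTION_TRANSFORM entry is read in
     * row-major order, i.e. data.r[i * CC_MATRIX_COLS + j] fills transform_matrix[i][j]. */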
12365
12366 cam_trigger_t aecTrigger;
12367 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12368 aecTrigger.trigger_id = -1;
12369 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12370 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12371 aecTrigger.trigger =
12372 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12373 aecTrigger.trigger_id =
12374 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12376 aecTrigger)) {
12377 rc = BAD_VALUE;
12378 }
12379 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12380 aecTrigger.trigger, aecTrigger.trigger_id);
12381 }
12382
12383 /*af_trigger must come with a trigger id*/
12384 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12385 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12386 cam_trigger_t af_trigger;
12387 af_trigger.trigger =
12388 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12389 af_trigger.trigger_id =
12390 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12391 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12392 rc = BAD_VALUE;
12393 }
12394 LOGD("AfTrigger: %d AfTriggerID: %d",
12395 af_trigger.trigger, af_trigger.trigger_id);
12396 }
12397
12398 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12399 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12400 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12401 rc = BAD_VALUE;
12402 }
12403 }
12404 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12405 cam_edge_application_t edge_application;
12406 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012407
Thierry Strudel3d639192016-09-09 11:52:26 -070012408 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12409 edge_application.sharpness = 0;
12410 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012411 edge_application.sharpness =
12412 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12413 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12414 int32_t sharpness =
12415 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12416 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12417 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12418 LOGD("Setting edge mode sharpness %d", sharpness);
12419 edge_application.sharpness = sharpness;
12420 }
12421 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012422 }
12423 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12424 rc = BAD_VALUE;
12425 }
12426 }
12427
12428 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12429 int32_t respectFlashMode = 1;
12430 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12431 uint8_t fwk_aeMode =
12432 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012433 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12434 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12435 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012436 respectFlashMode = 0;
12437 LOGH("AE Mode controls flash, ignore android.flash.mode");
12438 }
12439 }
12440 if (respectFlashMode) {
12441 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12442 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12443 LOGH("flash mode after mapping %d", val);
12444 // To check: CAM_INTF_META_FLASH_MODE usage
12445 if (NAME_NOT_FOUND != val) {
12446 uint8_t flashMode = (uint8_t)val;
12447 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12448 rc = BAD_VALUE;
12449 }
12450 }
12451 }
12452 }
12453
12454 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12455 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12457 rc = BAD_VALUE;
12458 }
12459 }
12460
12461 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12462 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12464 flashFiringTime)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468
12469 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12470 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12472 hotPixelMode)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476
12477 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12478 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12479 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12480 lensAperture)) {
12481 rc = BAD_VALUE;
12482 }
12483 }
12484
12485 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12486 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12487 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12488 filterDensity)) {
12489 rc = BAD_VALUE;
12490 }
12491 }
12492
12493 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12494 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12495 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12496 focalLength)) {
12497 rc = BAD_VALUE;
12498 }
12499 }
12500
12501 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12502 uint8_t optStabMode =
12503 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12505 optStabMode)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509
12510 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12511 uint8_t videoStabMode =
12512 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12513 LOGD("videoStabMode from APP = %d", videoStabMode);
12514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12515 videoStabMode)) {
12516 rc = BAD_VALUE;
12517 }
12518 }
12519
12520
12521 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12522 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12524 noiseRedMode)) {
12525 rc = BAD_VALUE;
12526 }
12527 }
12528
12529 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12530 float reprocessEffectiveExposureFactor =
12531 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12532 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12533 reprocessEffectiveExposureFactor)) {
12534 rc = BAD_VALUE;
12535 }
12536 }
12537
12538 cam_crop_region_t scalerCropRegion;
12539 bool scalerCropSet = false;
12540 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12541 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12542 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12543 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12544 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12545
12546 // Map coordinate system from active array to sensor output.
12547 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12548 scalerCropRegion.width, scalerCropRegion.height);
12549
12550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12551 scalerCropRegion)) {
12552 rc = BAD_VALUE;
12553 }
12554 scalerCropSet = true;
12555 }
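    /* Note (descriptive): the crop region arrives in active-array coordinates and is mapped to
     * sensor-output coordinates here; scalerCropSet is used further below to clip the AE/AF
     * regions against this crop via resetIfNeededROI(). */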
12556
12557 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12558 int64_t sensorExpTime =
12559 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12560 LOGD("setting sensorExpTime %lld", sensorExpTime);
12561 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12562 sensorExpTime)) {
12563 rc = BAD_VALUE;
12564 }
12565 }
12566
12567 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12568 int64_t sensorFrameDuration =
12569 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012570 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12571 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12572 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12573 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12574 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12575 sensorFrameDuration)) {
12576 rc = BAD_VALUE;
12577 }
12578 }
12579
12580 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12581 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12582 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12583 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12584 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12585 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12586 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12588 sensorSensitivity)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
12592
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012593#ifndef USE_HAL_3_3
12594 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12595 int32_t ispSensitivity =
12596 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12597 if (ispSensitivity <
12598 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12599 ispSensitivity =
12600 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12601 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12602 }
12603 if (ispSensitivity >
12604 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12605 ispSensitivity =
12606 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12607 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12608 }
12609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12610 ispSensitivity)) {
12611 rc = BAD_VALUE;
12612 }
12613 }
12614#endif
12615
Thierry Strudel3d639192016-09-09 11:52:26 -070012616 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12617 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12618 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12619 rc = BAD_VALUE;
12620 }
12621 }
12622
12623 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12624 uint8_t fwk_facedetectMode =
12625 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12626
12627 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12628 fwk_facedetectMode);
12629
12630 if (NAME_NOT_FOUND != val) {
12631 uint8_t facedetectMode = (uint8_t)val;
12632 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12633 facedetectMode)) {
12634 rc = BAD_VALUE;
12635 }
12636 }
12637 }
12638
Thierry Strudel54dc9782017-02-15 12:12:10 -080012639 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012640 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012641 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12643 histogramMode)) {
12644 rc = BAD_VALUE;
12645 }
12646 }
12647
12648 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12649 uint8_t sharpnessMapMode =
12650 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12652 sharpnessMapMode)) {
12653 rc = BAD_VALUE;
12654 }
12655 }
12656
12657 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12658 uint8_t tonemapMode =
12659 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12665 /*All tonemap channels will have the same number of points*/
12666 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12667 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12668 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12669 cam_rgb_tonemap_curves tonemapCurves;
12670 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12671 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12672 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12673 tonemapCurves.tonemap_points_cnt,
12674 CAM_MAX_TONEMAP_CURVE_SIZE);
12675 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12676 }
12677
12678 /* ch0 = G*/
12679 size_t point = 0;
12680 cam_tonemap_curve_t tonemapCurveGreen;
12681 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12682 for (size_t j = 0; j < 2; j++) {
12683 tonemapCurveGreen.tonemap_points[i][j] =
12684 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12685 point++;
12686 }
12687 }
12688 tonemapCurves.curves[0] = tonemapCurveGreen;
12689
12690 /* ch 1 = B */
12691 point = 0;
12692 cam_tonemap_curve_t tonemapCurveBlue;
12693 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12694 for (size_t j = 0; j < 2; j++) {
12695 tonemapCurveBlue.tonemap_points[i][j] =
12696 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12697 point++;
12698 }
12699 }
12700 tonemapCurves.curves[1] = tonemapCurveBlue;
12701
12702 /* ch 2 = R */
12703 point = 0;
12704 cam_tonemap_curve_t tonemapCurveRed;
12705 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12706 for (size_t j = 0; j < 2; j++) {
12707 tonemapCurveRed.tonemap_points[i][j] =
12708 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12709 point++;
12710 }
12711 }
12712 tonemapCurves.curves[2] = tonemapCurveRed;
12713
12714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12715 tonemapCurves)) {
12716 rc = BAD_VALUE;
12717 }
12718 }
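    /* Layout note (descriptive): each ANDROID_TONEMAP_CURVE_* entry is a flat list of
     * (Pin, Pout) pairs, hence tonemap_points_cnt = count / 2, and all three channels are
     * expected to carry the same number of points. */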
12719
12720 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12721 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12723 captureIntent)) {
12724 rc = BAD_VALUE;
12725 }
12726 }
12727
12728 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12729 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12730 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12731 blackLevelLock)) {
12732 rc = BAD_VALUE;
12733 }
12734 }
12735
12736 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12737 uint8_t lensShadingMapMode =
12738 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12740 lensShadingMapMode)) {
12741 rc = BAD_VALUE;
12742 }
12743 }
12744
12745 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12746 cam_area_t roi;
12747 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012748 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012749
12750 // Map coordinate system from active array to sensor output.
12751 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12752 roi.rect.height);
12753
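        // If this request also set a scaler crop region, resetIfNeededROI() decides whether the
        // ROI is still applicable; when it returns false the ROI is not sent. The AF regions
        // below get the same treatment.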
12754 if (scalerCropSet) {
12755 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12756 }
12757 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12758 rc = BAD_VALUE;
12759 }
12760 }
12761
12762 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12763 cam_area_t roi;
12764 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012765 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012766
12767 // Map coordinate system from active array to sensor output.
12768 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12769 roi.rect.height);
12770
12771 if (scalerCropSet) {
12772 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12773 }
12774 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12775 rc = BAD_VALUE;
12776 }
12777 }
12778
12779 // CDS for non-HFR non-video mode
12780 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12781 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12782 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12783 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12784 LOGE("Invalid CDS mode %d!", *fwk_cds);
12785 } else {
12786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12787 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12788 rc = BAD_VALUE;
12789 }
12790 }
12791 }
12792
Thierry Strudel04e026f2016-10-10 11:27:36 -070012793 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012794 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012795 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012796 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12797 }
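    // A video HDR session (m_bVideoHdrEnabled) forces video HDR on regardless of the
    // per-request vendor tag.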
12798 if (m_bVideoHdrEnabled)
12799 vhdr = CAM_VIDEO_HDR_MODE_ON;
12800
Thierry Strudel54dc9782017-02-15 12:12:10 -080012801 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12802
12803 if(vhdr != curr_hdr_state)
12804 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12805
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012806 rc = setVideoHdrMode(mParameters, vhdr);
12807 if (rc != NO_ERROR) {
12808        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012809 }
12810
12811 //IR
12812 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12813 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12814 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012815 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12816 uint8_t isIRon = 0;
12817
12818        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012819 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12820 LOGE("Invalid IR mode %d!", fwk_ir);
12821 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012822 if(isIRon != curr_ir_state )
12823 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12824
Thierry Strudel04e026f2016-10-10 11:27:36 -070012825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12826 CAM_INTF_META_IR_MODE, fwk_ir)) {
12827 rc = BAD_VALUE;
12828 }
12829 }
12830 }
12831
Thierry Strudel54dc9782017-02-15 12:12:10 -080012832 //Binning Correction Mode
12833 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12834 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12835 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12836 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12837 || (0 > fwk_binning_correction)) {
12838 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12839 } else {
12840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12841 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12842 rc = BAD_VALUE;
12843 }
12844 }
12845 }
12846
Thierry Strudel269c81a2016-10-12 12:13:59 -070012847 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12848 float aec_speed;
12849 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12850 LOGD("AEC Speed :%f", aec_speed);
12851        if (aec_speed < 0) {
12852            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12853 } else {
12854 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12855 aec_speed)) {
12856 rc = BAD_VALUE;
12857 }
12858 }
12859 }
12860
12861 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12862 float awb_speed;
12863 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12864 LOGD("AWB Speed :%f", awb_speed);
12865        if (awb_speed < 0) {
12866            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12867 } else {
12868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12869 awb_speed)) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873 }
12874
Thierry Strudel3d639192016-09-09 11:52:26 -070012875 // TNR
12876 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12877 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12878 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012879 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012880 cam_denoise_param_t tnr;
12881 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12882 tnr.process_plates =
12883 (cam_denoise_process_type_t)frame_settings.find(
12884 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12885 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012886
12887 if(b_TnrRequested != curr_tnr_state)
12888 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12889
Thierry Strudel3d639192016-09-09 11:52:26 -070012890 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12891 rc = BAD_VALUE;
12892 }
12893 }
12894
Thierry Strudel54dc9782017-02-15 12:12:10 -080012895 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012896 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012897 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12899 *exposure_metering_mode)) {
12900 rc = BAD_VALUE;
12901 }
12902 }
12903
Thierry Strudel3d639192016-09-09 11:52:26 -070012904 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12905 int32_t fwk_testPatternMode =
12906 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12907 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12908 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12909
12910 if (NAME_NOT_FOUND != testPatternMode) {
12911 cam_test_pattern_data_t testPatternData;
12912 memset(&testPatternData, 0, sizeof(testPatternData));
12913 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12914 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12915 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12916 int32_t *fwk_testPatternData =
12917 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12918 testPatternData.r = fwk_testPatternData[0];
12919 testPatternData.b = fwk_testPatternData[3];
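                // The framework supplies test pattern data as [R, G, G, B]; map the two green
                // samples onto Gr/Gb according to the sensor's CFA (Bayer) arrangement.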
12920 switch (gCamCapability[mCameraId]->color_arrangement) {
12921 case CAM_FILTER_ARRANGEMENT_RGGB:
12922 case CAM_FILTER_ARRANGEMENT_GRBG:
12923 testPatternData.gr = fwk_testPatternData[1];
12924 testPatternData.gb = fwk_testPatternData[2];
12925 break;
12926 case CAM_FILTER_ARRANGEMENT_GBRG:
12927 case CAM_FILTER_ARRANGEMENT_BGGR:
12928 testPatternData.gr = fwk_testPatternData[2];
12929 testPatternData.gb = fwk_testPatternData[1];
12930 break;
12931 default:
12932 LOGE("color arrangement %d is not supported",
12933 gCamCapability[mCameraId]->color_arrangement);
12934 break;
12935 }
12936 }
12937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12938 testPatternData)) {
12939 rc = BAD_VALUE;
12940 }
12941 } else {
12942 LOGE("Invalid framework sensor test pattern mode %d",
12943 fwk_testPatternMode);
12944 }
12945 }
12946
12947 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12948 size_t count = 0;
12949 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12950 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12951 gps_coords.data.d, gps_coords.count, count);
12952 if (gps_coords.count != count) {
12953 rc = BAD_VALUE;
12954 }
12955 }
12956
12957 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12958 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12959 size_t count = 0;
12960 const char *gps_methods_src = (const char *)
12961 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12962 memset(gps_methods, '\0', sizeof(gps_methods));
12963 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12964 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12965 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12966 if (GPS_PROCESSING_METHOD_SIZE != count) {
12967 rc = BAD_VALUE;
12968 }
12969 }
12970
12971 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12972 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12974 gps_timestamp)) {
12975 rc = BAD_VALUE;
12976 }
12977 }
12978
12979 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12980 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12981 cam_rotation_info_t rotation_info;
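        // ANDROID_JPEG_ORIENTATION is expressed in degrees clockwise and must be one of
        // 0/90/180/270, so only those values are mapped to a ROTATE_* enum here.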
12982 if (orientation == 0) {
12983 rotation_info.rotation = ROTATE_0;
12984 } else if (orientation == 90) {
12985 rotation_info.rotation = ROTATE_90;
12986 } else if (orientation == 180) {
12987 rotation_info.rotation = ROTATE_180;
12988 } else if (orientation == 270) {
12989 rotation_info.rotation = ROTATE_270;
12990 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012991 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012992 rotation_info.streamId = snapshotStreamId;
12993 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12995 rc = BAD_VALUE;
12996 }
12997 }
12998
12999 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13000 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13002 rc = BAD_VALUE;
13003 }
13004 }
13005
13006 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13007 uint32_t thumb_quality = (uint32_t)
13008 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13009 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13010 thumb_quality)) {
13011 rc = BAD_VALUE;
13012 }
13013 }
13014
13015 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13016 cam_dimension_t dim;
13017 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13018 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13020 rc = BAD_VALUE;
13021 }
13022 }
13023
13024 // Internal metadata
13025 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13026 size_t count = 0;
13027 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13028 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13029 privatedata.data.i32, privatedata.count, count);
13030 if (privatedata.count != count) {
13031 rc = BAD_VALUE;
13032 }
13033 }
13034
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013035 // ISO/Exposure Priority
13036 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13037 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13038 cam_priority_mode_t mode =
13039 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
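        // QCAMERA3_SELECT_PRIORITY picks ISO-priority or exposure-time-priority manual 3A and
        // QCAMERA3_USE_ISO_EXP_PRIORITY carries the requested value; ZSL is enabled only while
        // one of these priorities is active.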
13040 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13041 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13042 use_iso_exp_pty.previewOnly = FALSE;
13043 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13044 use_iso_exp_pty.value = *ptr;
13045
13046 if(CAM_ISO_PRIORITY == mode) {
13047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13048 use_iso_exp_pty)) {
13049 rc = BAD_VALUE;
13050 }
13051 }
13052 else {
13053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13054 use_iso_exp_pty)) {
13055 rc = BAD_VALUE;
13056 }
13057 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013058
13059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13060 rc = BAD_VALUE;
13061 }
13062 }
13063 } else {
13064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13065 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013066 }
13067 }
13068
13069 // Saturation
13070 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13071 int32_t* use_saturation =
13072 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13074 rc = BAD_VALUE;
13075 }
13076 }
13077
Thierry Strudel3d639192016-09-09 11:52:26 -070013078 // EV step
13079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13080 gCamCapability[mCameraId]->exp_compensation_step)) {
13081 rc = BAD_VALUE;
13082 }
13083
13084 // CDS info
13085 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13086 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13087 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13088
13089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13090 CAM_INTF_META_CDS_DATA, *cdsData)) {
13091 rc = BAD_VALUE;
13092 }
13093 }
13094
Shuzhen Wang19463d72016-03-08 11:09:52 -080013095 // Hybrid AE
13096 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13097 uint8_t *hybrid_ae = (uint8_t *)
13098 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013099 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13100 rc = BAD_VALUE;
13101 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013102 }
13103
Shuzhen Wang14415f52016-11-16 18:26:18 -080013104 // Histogram
13105 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13106 uint8_t histogramMode =
13107 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13109 histogramMode)) {
13110 rc = BAD_VALUE;
13111 }
13112 }
13113
13114 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13115 int32_t histogramBins =
13116 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13118 histogramBins)) {
13119 rc = BAD_VALUE;
13120 }
13121 }
13122
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013123 // Tracking AF
13124 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13125 uint8_t trackingAfTrigger =
13126 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13128 trackingAfTrigger)) {
13129 rc = BAD_VALUE;
13130 }
13131 }
13132
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013133 // Makernote
13134 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13135 if (entry.count != 0) {
13136 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13137 cam_makernote_t makernote;
13138 makernote.length = entry.count;
13139 memcpy(makernote.data, entry.data.u8, makernote.length);
13140 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13141 rc = BAD_VALUE;
13142 }
13143 } else {
13144            ALOGE("%s: Makernote length %zu is larger than %d", __FUNCTION__, entry.count,
13145 MAX_MAKERNOTE_LENGTH);
13146 rc = BAD_VALUE;
13147 }
13148 }
13149
Thierry Strudel3d639192016-09-09 11:52:26 -070013150 return rc;
13151}
13152
13153/*===========================================================================
13154 * FUNCTION : captureResultCb
13155 *
13156 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13157 *
13158 * PARAMETERS :
13159 * @metadata : metadata (super buffer) information from mm-camera-interface
13160 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13161 * @userdata: userdata
13162 *
13163 * RETURN : NONE
13164 *==========================================================================*/
13165void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13166 camera3_stream_buffer_t *buffer,
13167 uint32_t frame_number, bool isInputBuffer, void *userdata)
13168{
13169 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13170 if (hw == NULL) {
13171 LOGE("Invalid hw %p", hw);
13172 return;
13173 }
13174
13175 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13176 return;
13177}
13178
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013179/*===========================================================================
13180 * FUNCTION : setBufferErrorStatus
13181 *
13182 * DESCRIPTION: Callback handler for channels to report any buffer errors
13183 *
13184 * PARAMETERS :
13185 * @ch : Channel on which buffer error is reported from
13186 * @frame_number : frame number on which buffer error is reported on
13187 * @buffer_status : buffer error status
13188 * @userdata: userdata
13189 *
13190 * RETURN : NONE
13191 *==========================================================================*/
13192void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13193 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13194{
13195 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13196 if (hw == NULL) {
13197 LOGE("Invalid hw %p", hw);
13198 return;
13199 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013200
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013201 hw->setBufferErrorStatus(ch, frame_number, err);
13202 return;
13203}
13204
13205void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13206 uint32_t frameNumber, camera3_buffer_status_t err)
13207{
13208 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13209 pthread_mutex_lock(&mMutex);
13210
13211 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13212 if (req.frame_number != frameNumber)
13213 continue;
13214 for (auto& k : req.mPendingBufferList) {
13215 if(k.stream->priv == ch) {
13216 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13217 }
13218 }
13219 }
13220
13221 pthread_mutex_unlock(&mMutex);
13222 return;
13223}
Thierry Strudel3d639192016-09-09 11:52:26 -070013224/*===========================================================================
13225 * FUNCTION : initialize
13226 *
13227 * DESCRIPTION: Pass framework callback pointers to HAL
13228 *
13229 * PARAMETERS :
13230 *
13231 *
13232 * RETURN : Success : 0
13233 * Failure: -ENODEV
13234 *==========================================================================*/
13235
13236int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13237 const camera3_callback_ops_t *callback_ops)
13238{
13239 LOGD("E");
13240 QCamera3HardwareInterface *hw =
13241 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13242 if (!hw) {
13243 LOGE("NULL camera device");
13244 return -ENODEV;
13245 }
13246
13247 int rc = hw->initialize(callback_ops);
13248 LOGD("X");
13249 return rc;
13250}
13251
13252/*===========================================================================
13253 * FUNCTION : configure_streams
13254 *
13255 * DESCRIPTION:
13256 *
13257 * PARAMETERS :
13258 *
13259 *
13260 * RETURN : Success: 0
13261 * Failure: -EINVAL (if stream configuration is invalid)
13262 * -ENODEV (fatal error)
13263 *==========================================================================*/
13264
13265int QCamera3HardwareInterface::configure_streams(
13266 const struct camera3_device *device,
13267 camera3_stream_configuration_t *stream_list)
13268{
13269 LOGD("E");
13270 QCamera3HardwareInterface *hw =
13271 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13272 if (!hw) {
13273 LOGE("NULL camera device");
13274 return -ENODEV;
13275 }
13276 int rc = hw->configureStreams(stream_list);
13277 LOGD("X");
13278 return rc;
13279}
13280
13281/*===========================================================================
13282 * FUNCTION : construct_default_request_settings
13283 *
13284 * DESCRIPTION: Configure a settings buffer to meet the required use case
13285 *
13286 * PARAMETERS :
13287 *
13288 *
13289 * RETURN : Success: Return valid metadata
13290 * Failure: Return NULL
13291 *==========================================================================*/
13292const camera_metadata_t* QCamera3HardwareInterface::
13293 construct_default_request_settings(const struct camera3_device *device,
13294 int type)
13295{
13296
13297 LOGD("E");
13298 camera_metadata_t* fwk_metadata = NULL;
13299 QCamera3HardwareInterface *hw =
13300 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13301 if (!hw) {
13302 LOGE("NULL camera device");
13303 return NULL;
13304 }
13305
13306 fwk_metadata = hw->translateCapabilityToMetadata(type);
13307
13308 LOGD("X");
13309 return fwk_metadata;
13310}
13311
13312/*===========================================================================
13313 * FUNCTION : process_capture_request
13314 *
13315 * DESCRIPTION:
13316 *
13317 * PARAMETERS :
13318 *
13319 *
13320 * RETURN :
13321 *==========================================================================*/
13322int QCamera3HardwareInterface::process_capture_request(
13323 const struct camera3_device *device,
13324 camera3_capture_request_t *request)
13325{
13326 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013327 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013328 QCamera3HardwareInterface *hw =
13329 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13330 if (!hw) {
13331 LOGE("NULL camera device");
13332 return -EINVAL;
13333 }
13334
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013335 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013336 LOGD("X");
13337 return rc;
13338}
13339
13340/*===========================================================================
13341 * FUNCTION : dump
13342 *
13343 * DESCRIPTION:
13344 *
13345 * PARAMETERS :
13346 *
13347 *
13348 * RETURN :
13349 *==========================================================================*/
13350
13351void QCamera3HardwareInterface::dump(
13352 const struct camera3_device *device, int fd)
13353{
13354 /* Log level property is read when "adb shell dumpsys media.camera" is
13355 called so that the log level can be controlled without restarting
13356 the media server */
13357 getLogLevel();
13358
13359 LOGD("E");
13360 QCamera3HardwareInterface *hw =
13361 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13362 if (!hw) {
13363 LOGE("NULL camera device");
13364 return;
13365 }
13366
13367 hw->dump(fd);
13368 LOGD("X");
13369 return;
13370}
13371
13372/*===========================================================================
13373 * FUNCTION : flush
13374 *
13375 * DESCRIPTION:
13376 *
13377 * PARAMETERS :
13378 *
13379 *
13380 * RETURN :
13381 *==========================================================================*/
13382
13383int QCamera3HardwareInterface::flush(
13384 const struct camera3_device *device)
13385{
13386 int rc;
13387 LOGD("E");
13388 QCamera3HardwareInterface *hw =
13389 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13390 if (!hw) {
13391 LOGE("NULL camera device");
13392 return -EINVAL;
13393 }
13394
13395 pthread_mutex_lock(&hw->mMutex);
13396 // Validate current state
13397 switch (hw->mState) {
13398 case STARTED:
13399 /* valid state */
13400 break;
13401
13402 case ERROR:
13403 pthread_mutex_unlock(&hw->mMutex);
13404 hw->handleCameraDeviceError();
13405 return -ENODEV;
13406
13407 default:
13408 LOGI("Flush returned during state %d", hw->mState);
13409 pthread_mutex_unlock(&hw->mMutex);
13410 return 0;
13411 }
13412 pthread_mutex_unlock(&hw->mMutex);
13413
13414 rc = hw->flush(true /* restart channels */ );
13415 LOGD("X");
13416 return rc;
13417}
13418
13419/*===========================================================================
13420 * FUNCTION : close_camera_device
13421 *
13422 * DESCRIPTION:
13423 *
13424 * PARAMETERS :
13425 *
13426 *
13427 * RETURN :
13428 *==========================================================================*/
13429int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13430{
13431 int ret = NO_ERROR;
13432 QCamera3HardwareInterface *hw =
13433 reinterpret_cast<QCamera3HardwareInterface *>(
13434 reinterpret_cast<camera3_device_t *>(device)->priv);
13435 if (!hw) {
13436 LOGE("NULL camera device");
13437 return BAD_VALUE;
13438 }
13439
13440 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13441 delete hw;
13442 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013443 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013444 return ret;
13445}
13446
13447/*===========================================================================
13448 * FUNCTION : getWaveletDenoiseProcessPlate
13449 *
13450 * DESCRIPTION: query wavelet denoise process plate
13451 *
13452 * PARAMETERS : None
13453 *
13454 * RETURN : WNR process plate value
13455 *==========================================================================*/
13456cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13457{
13458 char prop[PROPERTY_VALUE_MAX];
13459 memset(prop, 0, sizeof(prop));
13460 property_get("persist.denoise.process.plates", prop, "0");
13461 int processPlate = atoi(prop);
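    // persist.denoise.process.plates: 0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr,
    // 3 = streamlined CbCr; any other value falls back to streamlined YCbCr.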
13462 switch(processPlate) {
13463 case 0:
13464 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13465 case 1:
13466 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13467 case 2:
13468 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13469 case 3:
13470 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13471 default:
13472 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13473 }
13474}
13475
13476
13477/*===========================================================================
13478 * FUNCTION : getTemporalDenoiseProcessPlate
13479 *
13480 * DESCRIPTION: query temporal denoise process plate
13481 *
13482 * PARAMETERS : None
13483 *
13484 * RETURN : TNR process plate value
13485 *==========================================================================*/
13486cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13487{
13488 char prop[PROPERTY_VALUE_MAX];
13489 memset(prop, 0, sizeof(prop));
13490 property_get("persist.tnr.process.plates", prop, "0");
13491 int processPlate = atoi(prop);
13492 switch(processPlate) {
13493 case 0:
13494 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13495 case 1:
13496 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13497 case 2:
13498 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13499 case 3:
13500 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13501 default:
13502 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13503 }
13504}
13505
13506
13507/*===========================================================================
13508 * FUNCTION : extractSceneMode
13509 *
13510 * DESCRIPTION: Extract scene mode from frameworks set metadata
13511 *
13512 * PARAMETERS :
13513 * @frame_settings: CameraMetadata reference
13514 * @metaMode: ANDROID_CONTROL_MODE value
13515 * @hal_metadata: hal metadata structure
13516 *
13517 * RETURN : int32_t type of status (NO_ERROR on success)
13518 *==========================================================================*/
13519int32_t QCamera3HardwareInterface::extractSceneMode(
13520 const CameraMetadata &frame_settings, uint8_t metaMode,
13521 metadata_buffer_t *hal_metadata)
13522{
13523 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013524 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13525
13526 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13527 LOGD("Ignoring control mode OFF_KEEP_STATE");
13528 return NO_ERROR;
13529 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013530
13531 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13532 camera_metadata_ro_entry entry =
13533 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13534 if (0 == entry.count)
13535 return rc;
13536
13537 uint8_t fwk_sceneMode = entry.data.u8[0];
13538
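        // Translate the framework scene mode to the HAL bestshot enum; unknown values leave
        // sceneMode at CAM_SCENE_MODE_OFF.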
13539 int val = lookupHalName(SCENE_MODES_MAP,
13540 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13541 fwk_sceneMode);
13542 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013543 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013544 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013545 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013546 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013547
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013548 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13549 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13550 }
13551
13552 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13553        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013554 cam_hdr_param_t hdr_params;
13555 hdr_params.hdr_enable = 1;
13556 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13557 hdr_params.hdr_need_1x = false;
13558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13559 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13560 rc = BAD_VALUE;
13561 }
13562 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013563
Thierry Strudel3d639192016-09-09 11:52:26 -070013564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13565 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13566 rc = BAD_VALUE;
13567 }
13568 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013569
13570 if (mForceHdrSnapshot) {
13571 cam_hdr_param_t hdr_params;
13572 hdr_params.hdr_enable = 1;
13573 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13574 hdr_params.hdr_need_1x = false;
13575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13576 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13577 rc = BAD_VALUE;
13578 }
13579 }
13580
Thierry Strudel3d639192016-09-09 11:52:26 -070013581 return rc;
13582}
13583
13584/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013585 * FUNCTION : setVideoHdrMode
13586 *
13587 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13588 *
13589 * PARAMETERS :
13590 * @hal_metadata: hal metadata structure
13591 * @vhdr : video HDR mode to set (value of the QCAMERA3_VIDEO_HDR_MODE vendor tag)
13592 *
13593 * RETURN : int32_t type of status
13594 *==========================================================================*/
13595int32_t QCamera3HardwareInterface::setVideoHdrMode(
13596 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13597{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013598 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13599 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13600 }
13601
13602 LOGE("Invalid Video HDR mode %d!", vhdr);
13603 return BAD_VALUE;
13604}
13605
13606/*===========================================================================
13607 * FUNCTION : setSensorHDR
13608 *
13609 * DESCRIPTION: Enable/disable sensor HDR.
13610 *
13611 * PARAMETERS :
13612 * @hal_metadata: hal metadata structure
13613 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when called for video HDR (m_bSensorHDREnabled is left untouched)
13614 *
13615 * RETURN : int32_t type of status
13616 *==========================================================================*/
13617int32_t QCamera3HardwareInterface::setSensorHDR(
13618 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13619{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013620 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013621 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13622
13623 if (enable) {
13624 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13625 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
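        // persist.camera.sensor.hdr selects the sensor HDR flavor as a cam_sensor_hdr_type_t
        // value; 0 keeps it off, while the IoT build defaults to 3 (staggered HDR, see below).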
13626 #ifdef _LE_CAMERA_
13627 //Default to staggered HDR for IOT
13628 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13629 #else
13630 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13631 #endif
13632 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13633 }
13634
13635 bool isSupported = false;
13636 switch (sensor_hdr) {
13637 case CAM_SENSOR_HDR_IN_SENSOR:
13638 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13639 CAM_QCOM_FEATURE_SENSOR_HDR) {
13640 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013641 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013642 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013643 break;
13644 case CAM_SENSOR_HDR_ZIGZAG:
13645 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13646 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13647 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013648 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013649 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013650 break;
13651 case CAM_SENSOR_HDR_STAGGERED:
13652 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13653 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13654 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013655 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013656 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013657 break;
13658 case CAM_SENSOR_HDR_OFF:
13659 isSupported = true;
13660 LOGD("Turning off sensor HDR");
13661 break;
13662 default:
13663 LOGE("HDR mode %d not supported", sensor_hdr);
13664 rc = BAD_VALUE;
13665 break;
13666 }
13667
13668 if(isSupported) {
13669 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13670 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13671 rc = BAD_VALUE;
13672 } else {
13673 if(!isVideoHdrEnable)
13674 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013675 }
13676 }
13677 return rc;
13678}
13679
13680/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013681 * FUNCTION : needRotationReprocess
13682 *
13683 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13684 *
13685 * PARAMETERS : none
13686 *
13687 * RETURN : true: needed
13688 * false: no need
13689 *==========================================================================*/
13690bool QCamera3HardwareInterface::needRotationReprocess()
13691{
13692 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13693 // current rotation is not zero, and pp has the capability to process rotation
13694 LOGH("need do reprocess for rotation");
13695 return true;
13696 }
13697
13698 return false;
13699}
13700
13701/*===========================================================================
13702 * FUNCTION : needReprocess
13703 *
13704 * DESCRIPTION: if reprocess is needed
13705 *
13706 * PARAMETERS : none
13707 *
13708 * RETURN : true: needed
13709 * false: no need
13710 *==========================================================================*/
13711bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13712{
13713 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13714 // TODO: add for ZSL HDR later
13715 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13716 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13717 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13718 return true;
13719 } else {
13720 LOGH("already post processed frame");
13721 return false;
13722 }
13723 }
13724 return needRotationReprocess();
13725}
13726
13727/*===========================================================================
13728 * FUNCTION : needJpegExifRotation
13729 *
13730 * DESCRIPTION: if rotation via JPEG EXIF is needed (i.e. pp cannot rotate)
13731 *
13732 * PARAMETERS : none
13733 *
13734 * RETURN : true: needed
13735 * false: no need
13736 *==========================================================================*/
13737bool QCamera3HardwareInterface::needJpegExifRotation()
13738{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013739 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013740 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13741 LOGD("Need use Jpeg EXIF Rotation");
13742 return true;
13743 }
13744 return false;
13745}
13746
13747/*===========================================================================
13748 * FUNCTION : addOfflineReprocChannel
13749 *
13750 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13751 * coming from input channel
13752 *
13753 * PARAMETERS :
13754 * @config : reprocess configuration
13755 * @inputChHandle : pointer to the input (source) channel
13756 *
13757 *
13758 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13759 *==========================================================================*/
13760QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13761 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13762{
13763 int32_t rc = NO_ERROR;
13764 QCamera3ReprocessChannel *pChannel = NULL;
13765
13766 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013767 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13768 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013769 if (NULL == pChannel) {
13770 LOGE("no mem for reprocess channel");
13771 return NULL;
13772 }
13773
13774 rc = pChannel->initialize(IS_TYPE_NONE);
13775 if (rc != NO_ERROR) {
13776 LOGE("init reprocess channel failed, ret = %d", rc);
13777 delete pChannel;
13778 return NULL;
13779 }
13780
13781 // pp feature config
13782 cam_pp_feature_config_t pp_config;
13783 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13784
13785 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13786 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13787 & CAM_QCOM_FEATURE_DSDN) {
13788        //Use CPP CDS in case h/w supports it.
13789 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13790 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13791 }
13792 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13793 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13794 }
13795
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013796 if (config.hdr_param.hdr_enable) {
13797 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13798 pp_config.hdr_param = config.hdr_param;
13799 }
13800
13801 if (mForceHdrSnapshot) {
13802 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13803 pp_config.hdr_param.hdr_enable = 1;
13804 pp_config.hdr_param.hdr_need_1x = 0;
13805 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13806 }
13807
Thierry Strudel3d639192016-09-09 11:52:26 -070013808 rc = pChannel->addReprocStreamsFromSource(pp_config,
13809 config,
13810 IS_TYPE_NONE,
13811 mMetadataChannel);
13812
13813 if (rc != NO_ERROR) {
13814 delete pChannel;
13815 return NULL;
13816 }
13817 return pChannel;
13818}
13819
13820/*===========================================================================
13821 * FUNCTION : getMobicatMask
13822 *
13823 * DESCRIPTION: returns mobicat mask
13824 *
13825 * PARAMETERS : none
13826 *
13827 * RETURN : mobicat mask
13828 *
13829 *==========================================================================*/
13830uint8_t QCamera3HardwareInterface::getMobicatMask()
13831{
13832 return m_MobicatMask;
13833}
13834
13835/*===========================================================================
13836 * FUNCTION : setMobicat
13837 *
13838 * DESCRIPTION: set Mobicat on/off.
13839 *
13840 * PARAMETERS :
13841 * @params : none
13842 *
13843 * RETURN : int32_t type of status
13844 * NO_ERROR -- success
13845 * none-zero failure code
13846 *==========================================================================*/
13847int32_t QCamera3HardwareInterface::setMobicat()
13848{
Thierry Strudel3d639192016-09-09 11:52:26 -070013849 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013850
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013851 if (m_MobicatMask) {
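        // Ask all modules on both the VFE and postprocessing paths to reload chromatix so
        // tuning data is refreshed while Mobicat is enabled.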
Thierry Strudel3d639192016-09-09 11:52:26 -070013852 tune_cmd_t tune_cmd;
13853 tune_cmd.type = SET_RELOAD_CHROMATIX;
13854 tune_cmd.module = MODULE_ALL;
13855 tune_cmd.value = TRUE;
13856 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13857 CAM_INTF_PARM_SET_VFE_COMMAND,
13858 tune_cmd);
13859
13860 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13861 CAM_INTF_PARM_SET_PP_COMMAND,
13862 tune_cmd);
13863 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013864
13865 return ret;
13866}
13867
13868/*===========================================================================
13869* FUNCTION : getLogLevel
13870*
13871* DESCRIPTION: Reads the log level property into a variable
13872*
13873* PARAMETERS :
13874* None
13875*
13876* RETURN :
13877* None
13878*==========================================================================*/
13879void QCamera3HardwareInterface::getLogLevel()
13880{
13881 char prop[PROPERTY_VALUE_MAX];
13882 uint32_t globalLogLevel = 0;
13883
13884 property_get("persist.camera.hal.debug", prop, "0");
13885 int val = atoi(prop);
13886 if (0 <= val) {
13887 gCamHal3LogLevel = (uint32_t)val;
13888 }
13889
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013890 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013891 gKpiDebugLevel = atoi(prop);
13892
13893 property_get("persist.camera.global.debug", prop, "0");
13894 val = atoi(prop);
13895 if (0 <= val) {
13896 globalLogLevel = (uint32_t)val;
13897 }
13898
13899 /* Highest log level among hal.logs and global.logs is selected */
13900 if (gCamHal3LogLevel < globalLogLevel)
13901 gCamHal3LogLevel = globalLogLevel;
13902
13903 return;
13904}
13905
13906/*===========================================================================
13907 * FUNCTION : validateStreamRotations
13908 *
13909 * DESCRIPTION: Check if the rotations requested are supported
13910 *
13911 * PARAMETERS :
13912 * @stream_list : streams to be configured
13913 *
13914 * RETURN : NO_ERROR on success
13915 * -EINVAL on failure
13916 *
13917 *==========================================================================*/
13918int QCamera3HardwareInterface::validateStreamRotations(
13919 camera3_stream_configuration_t *streamList)
13920{
13921 int rc = NO_ERROR;
13922
13923 /*
13924 * Loop through all streams requested in configuration
13925 * Check if unsupported rotations have been requested on any of them
13926 */
13927 for (size_t j = 0; j < streamList->num_streams; j++){
13928 camera3_stream_t *newStream = streamList->streams[j];
13929
Emilian Peev35ceeed2017-06-29 11:58:56 -070013930 switch(newStream->rotation) {
13931 case CAMERA3_STREAM_ROTATION_0:
13932 case CAMERA3_STREAM_ROTATION_90:
13933 case CAMERA3_STREAM_ROTATION_180:
13934 case CAMERA3_STREAM_ROTATION_270:
13935 //Expected values
13936 break;
13937 default:
13938 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13939                    " type:%d and stream format:%d", __func__,
13940 newStream->rotation, newStream->stream_type,
13941 newStream->format);
13942 return -EINVAL;
13943 }
13944
Thierry Strudel3d639192016-09-09 11:52:26 -070013945 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13946 bool isImplDef = (newStream->format ==
13947 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13948 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13949 isImplDef);
13950
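        // Rotation is only honored on IMPLEMENTATION_DEFINED output streams; a rotated ZSL
        // (bidirectional) stream or any other format is rejected.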
13951 if (isRotated && (!isImplDef || isZsl)) {
13952 LOGE("Error: Unsupported rotation of %d requested for stream"
13953                    " type:%d and stream format:%d",
13954 newStream->rotation, newStream->stream_type,
13955 newStream->format);
13956 rc = -EINVAL;
13957 break;
13958 }
13959 }
13960
13961 return rc;
13962}
13963
13964/*===========================================================================
13965* FUNCTION : getFlashInfo
13966*
13967* DESCRIPTION: Retrieve information about whether the device has a flash.
13968*
13969* PARAMETERS :
13970* @cameraId : Camera id to query
13971* @hasFlash : Boolean indicating whether there is a flash device
13972* associated with given camera
13973* @flashNode : If a flash device exists, this will be its device node.
13974*
13975* RETURN :
13976* None
13977*==========================================================================*/
13978void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13979 bool& hasFlash,
13980 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13981{
13982 cam_capability_t* camCapability = gCamCapability[cameraId];
13983 if (NULL == camCapability) {
13984 hasFlash = false;
13985 flashNode[0] = '\0';
13986 } else {
13987 hasFlash = camCapability->flash_available;
13988 strlcpy(flashNode,
13989 (char*)camCapability->flash_dev_name,
13990 QCAMERA_MAX_FILEPATH_LENGTH);
13991 }
13992}
13993
13994/*===========================================================================
13995* FUNCTION : getEepromVersionInfo
13996*
13997* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13998*
13999* PARAMETERS : None
14000*
14001* RETURN : string describing EEPROM version
14002* "\0" if no such info available
14003*==========================================================================*/
14004const char *QCamera3HardwareInterface::getEepromVersionInfo()
14005{
14006 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14007}
14008
14009/*===========================================================================
14010* FUNCTION : getLdafCalib
14011*
14012* DESCRIPTION: Retrieve Laser AF calibration data
14013*
14014* PARAMETERS : None
14015*
14016* RETURN : Two uint32_t describing laser AF calibration data
14017* NULL if none is available.
14018*==========================================================================*/
14019const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14020{
14021 if (mLdafCalibExist) {
14022 return &mLdafCalib[0];
14023 } else {
14024 return NULL;
14025 }
14026}
14027
14028/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014029* FUNCTION : getEaselFwVersion
14030*
14031* DESCRIPTION: Retrieve Easel firmware version
14032*
14033* PARAMETERS : None
14034*
14035* RETURN : string describing Firmware version
14036* NULL if Easel manager client is not open or the query fails
14037*==========================================================================*/
14038const char *QCamera3HardwareInterface::getEaselFwVersion()
14039{
14040 int rc = NO_ERROR;
14041
14042 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
14043 ALOGD("%s: Querying Easel firmware version", __FUNCTION__);
14044 if (EaselManagerClientOpened) {
14045 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
14046 if (rc != OK)
14047 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
14048 else
14049 return (const char *)&mEaselFwVersion[0];
14050 }
14051 return NULL;
14052}
14053
14054/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014055 * FUNCTION : dynamicUpdateMetaStreamInfo
14056 *
14057 * DESCRIPTION: This function:
14058 * (1) stops all the channels
14059 * (2) returns error on pending requests and buffers
14060 * (3) sends metastream_info in setparams
14061 * (4) starts all channels
14062 * This is useful when sensor has to be restarted to apply any
14063 * settings such as frame rate from a different sensor mode
14064 *
14065 * PARAMETERS : None
14066 *
14067 * RETURN : NO_ERROR on success
14068 * Error codes on failure
14069 *
14070 *==========================================================================*/
14071int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14072{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014073 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014074 int rc = NO_ERROR;
14075
14076 LOGD("E");
14077
14078 rc = stopAllChannels();
14079 if (rc < 0) {
14080 LOGE("stopAllChannels failed");
14081 return rc;
14082 }
14083
14084 rc = notifyErrorForPendingRequests();
14085 if (rc < 0) {
14086 LOGE("notifyErrorForPendingRequests failed");
14087 return rc;
14088 }
14089
14090 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14091 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14092                ", Format:%d",
14093 mStreamConfigInfo.type[i],
14094 mStreamConfigInfo.stream_sizes[i].width,
14095 mStreamConfigInfo.stream_sizes[i].height,
14096 mStreamConfigInfo.postprocess_mask[i],
14097 mStreamConfigInfo.format[i]);
14098 }
14099
14100 /* Send meta stream info once again so that ISP can start */
14101 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14102 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14103 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14104 mParameters);
14105 if (rc < 0) {
14106 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14107 }
14108
14109 rc = startAllChannels();
14110 if (rc < 0) {
14111 LOGE("startAllChannels failed");
14112 return rc;
14113 }
14114
14115 LOGD("X");
14116 return rc;
14117}
14118
14119/*===========================================================================
14120 * FUNCTION : stopAllChannels
14121 *
14122 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14123 *
14124 * PARAMETERS : None
14125 *
14126 * RETURN : NO_ERROR on success
14127 * Error codes on failure
14128 *
14129 *==========================================================================*/
14130int32_t QCamera3HardwareInterface::stopAllChannels()
14131{
14132 int32_t rc = NO_ERROR;
14133
14134 LOGD("Stopping all channels");
14135 // Stop the Streams/Channels
14136 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14137 it != mStreamInfo.end(); it++) {
14138 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14139 if (channel) {
14140 channel->stop();
14141 }
14142 (*it)->status = INVALID;
14143 }
14144
14145 if (mSupportChannel) {
14146 mSupportChannel->stop();
14147 }
14148 if (mAnalysisChannel) {
14149 mAnalysisChannel->stop();
14150 }
14151 if (mRawDumpChannel) {
14152 mRawDumpChannel->stop();
14153 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014154 if (mHdrPlusRawSrcChannel) {
14155 mHdrPlusRawSrcChannel->stop();
14156 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014157 if (mMetadataChannel) {
14158 /* If content of mStreamInfo is not 0, there is metadata stream */
14159 mMetadataChannel->stop();
14160 }
14161
14162 LOGD("All channels stopped");
14163 return rc;
14164}
14165
14166/*===========================================================================
14167 * FUNCTION : startAllChannels
14168 *
14169 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14170 *
14171 * PARAMETERS : None
14172 *
14173 * RETURN : NO_ERROR on success
14174 * Error codes on failure
14175 *
14176 *==========================================================================*/
14177int32_t QCamera3HardwareInterface::startAllChannels()
14178{
14179 int32_t rc = NO_ERROR;
14180
14181 LOGD("Start all channels ");
14182 // Start the Streams/Channels
14183 if (mMetadataChannel) {
14184 /* If content of mStreamInfo is not 0, there is metadata stream */
14185 rc = mMetadataChannel->start();
14186 if (rc < 0) {
14187 LOGE("META channel start failed");
14188 return rc;
14189 }
14190 }
14191 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14192 it != mStreamInfo.end(); it++) {
14193 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14194 if (channel) {
14195 rc = channel->start();
14196 if (rc < 0) {
14197 LOGE("channel start failed");
14198 return rc;
14199 }
14200 }
14201 }
14202 if (mAnalysisChannel) {
14203 mAnalysisChannel->start();
14204 }
14205 if (mSupportChannel) {
14206 rc = mSupportChannel->start();
14207 if (rc < 0) {
14208 LOGE("Support channel start failed");
14209 return rc;
14210 }
14211 }
14212 if (mRawDumpChannel) {
14213 rc = mRawDumpChannel->start();
14214 if (rc < 0) {
14215 LOGE("RAW dump channel start failed");
14216 return rc;
14217 }
14218 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014219 if (mHdrPlusRawSrcChannel) {
14220 rc = mHdrPlusRawSrcChannel->start();
14221 if (rc < 0) {
14222 LOGE("HDR+ RAW channel start failed");
14223 return rc;
14224 }
14225 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014226
14227 LOGD("All channels started");
14228 return rc;
14229}
14230
14231/*===========================================================================
14232 * FUNCTION : notifyErrorForPendingRequests
14233 *
14234 * DESCRIPTION: This function sends error for all the pending requests/buffers
14235 *
14236 * PARAMETERS : None
14237 *
14238 * RETURN : Error codes
14239 * NO_ERROR on success
14240 *
14241 *==========================================================================*/
14242int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14243{
Emilian Peev7650c122017-01-19 08:24:33 -080014244 notifyErrorFoPendingDepthData(mDepthChannel);
14245
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014246 auto pendingRequest = mPendingRequestsList.begin();
14247 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014248
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014249 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14250 // buffers (for which buffers aren't sent yet).
14251 while (pendingRequest != mPendingRequestsList.end() ||
14252 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14253 if (pendingRequest == mPendingRequestsList.end() ||
14254 pendingBuffer->frame_number < pendingRequest->frame_number) {
14255            // The result metadata for this frame was already sent, so notify a buffer error and
14256            // return the frame's buffers with error status.
14257 for (auto &info : pendingBuffer->mPendingBufferList) {
14258 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014259 camera3_notify_msg_t notify_msg;
14260 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14261 notify_msg.type = CAMERA3_MSG_ERROR;
14262 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014263 notify_msg.message.error.error_stream = info.stream;
14264 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014265 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014266
14267 camera3_stream_buffer_t buffer = {};
14268 buffer.acquire_fence = -1;
14269 buffer.release_fence = -1;
14270 buffer.buffer = info.buffer;
14271 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14272 buffer.stream = info.stream;
14273 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014274 }
14275
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014276 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14277 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14278 pendingBuffer->frame_number > pendingRequest->frame_number) {
14279 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014280 camera3_notify_msg_t notify_msg;
14281 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14282 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014283 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14284 notify_msg.message.error.error_stream = nullptr;
14285 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014286 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014287
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014288 if (pendingRequest->input_buffer != nullptr) {
14289 camera3_capture_result result = {};
14290 result.frame_number = pendingRequest->frame_number;
14291 result.result = nullptr;
14292 result.input_buffer = pendingRequest->input_buffer;
14293 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014294 }
14295
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014296 mShutterDispatcher.clear(pendingRequest->frame_number);
14297 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14298 } else {
14299 // If both buffers and result metadata weren't sent yet, notify about a request error
14300 // and return buffers with error.
14301 for (auto &info : pendingBuffer->mPendingBufferList) {
14302 camera3_notify_msg_t notify_msg;
14303 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14304 notify_msg.type = CAMERA3_MSG_ERROR;
14305 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14306 notify_msg.message.error.error_stream = info.stream;
14307 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14308 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014309
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014310 camera3_stream_buffer_t buffer = {};
14311 buffer.acquire_fence = -1;
14312 buffer.release_fence = -1;
14313 buffer.buffer = info.buffer;
14314 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14315 buffer.stream = info.stream;
14316 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14317 }
14318
14319 if (pendingRequest->input_buffer != nullptr) {
14320 camera3_capture_result result = {};
14321 result.frame_number = pendingRequest->frame_number;
14322 result.result = nullptr;
14323 result.input_buffer = pendingRequest->input_buffer;
14324 orchestrateResult(&result);
14325 }
14326
14327 mShutterDispatcher.clear(pendingRequest->frame_number);
14328 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14329 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014330 }
14331 }
14332
14333 /* Reset pending frame Drop list and requests list */
14334 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014335 mShutterDispatcher.clear();
14336 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014337 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014338 mExpectedFrameDuration = 0;
14339 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014340 LOGH("Cleared all the pending buffers ");
14341
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014342 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014343}
14344
14345bool QCamera3HardwareInterface::isOnEncoder(
14346 const cam_dimension_t max_viewfinder_size,
14347 uint32_t width, uint32_t height)
14348{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014349 return ((width > (uint32_t)max_viewfinder_size.width) ||
14350 (height > (uint32_t)max_viewfinder_size.height) ||
14351 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14352 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014353}
14354
14355/*===========================================================================
14356 * FUNCTION : setBundleInfo
14357 *
14358 * DESCRIPTION: Set bundle info for all streams that are bundled.
14359 *
14360 * PARAMETERS : None
14361 *
14362 * RETURN : NO_ERROR on success
14363 * Error codes on failure
14364 *==========================================================================*/
14365int32_t QCamera3HardwareInterface::setBundleInfo()
14366{
14367 int32_t rc = NO_ERROR;
14368
14369 if (mChannelHandle) {
14370 cam_bundle_config_t bundleInfo;
14371 memset(&bundleInfo, 0, sizeof(bundleInfo));
14372 rc = mCameraHandle->ops->get_bundle_info(
14373 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14374 if (rc != NO_ERROR) {
14375 LOGE("get_bundle_info failed");
14376 return rc;
14377 }
14378 if (mAnalysisChannel) {
14379 mAnalysisChannel->setBundleInfo(bundleInfo);
14380 }
14381 if (mSupportChannel) {
14382 mSupportChannel->setBundleInfo(bundleInfo);
14383 }
14384 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14385 it != mStreamInfo.end(); it++) {
14386 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14387 channel->setBundleInfo(bundleInfo);
14388 }
14389 if (mRawDumpChannel) {
14390 mRawDumpChannel->setBundleInfo(bundleInfo);
14391 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014392 if (mHdrPlusRawSrcChannel) {
14393 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14394 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014395 }
14396
14397 return rc;
14398}
14399
14400/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014401 * FUNCTION : setInstantAEC
14402 *
14403 * DESCRIPTION: Set Instant AEC related params.
14404 *
14405 * PARAMETERS :
14406 * @meta: CameraMetadata reference
14407 *
14408 * RETURN : NO_ERROR on success
14409 * Error codes on failure
14410 *==========================================================================*/
14411int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14412{
14413 int32_t rc = NO_ERROR;
14414 uint8_t val = 0;
14415 char prop[PROPERTY_VALUE_MAX];
14416
14417 // First try to configure instant AEC from framework metadata
14418 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14419 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14420 }
14421
14422 // If the framework did not set this value, try to read it from the system property.
14423 if (val == 0) {
14424 memset(prop, 0, sizeof(prop));
14425 property_get("persist.camera.instant.aec", prop, "0");
14426 val = (uint8_t)atoi(prop);
14427 }
14428
14429 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14430 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14431 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14432 mInstantAEC = val;
14433 mInstantAECSettledFrameNumber = 0;
14434 mInstantAecFrameIdxCount = 0;
14435 LOGH("instantAEC value set %d",val);
14436 if (mInstantAEC) {
14437 memset(prop, 0, sizeof(prop));
14438 property_get("persist.camera.ae.instant.bound", prop, "10");
14439 int32_t aec_frame_skip_cnt = atoi(prop);
14440 if (aec_frame_skip_cnt >= 0) {
14441 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14442 } else {
14443 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14444 rc = BAD_VALUE;
14445 }
14446 }
14447 } else {
14448 LOGE("Bad instant aec value set %d", val);
14449 rc = BAD_VALUE;
14450 }
14451 return rc;
14452}
14453
14454/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014455 * FUNCTION : get_num_overall_buffers
14456 *
14457 * DESCRIPTION: Get the total number of pending buffers across all requests.
14458 *
14459 * PARAMETERS : None
14460 *
14461 * RETURN : Number of overall pending buffers
14462 *
14463 *==========================================================================*/
14464uint32_t PendingBuffersMap::get_num_overall_buffers()
14465{
14466 uint32_t sum_buffers = 0;
14467 for (auto &req : mPendingBuffersInRequest) {
14468 sum_buffers += req.mPendingBufferList.size();
14469 }
14470 return sum_buffers;
14471}
14472
14473/*===========================================================================
14474 * FUNCTION : removeBuf
14475 *
14476 * DESCRIPTION: Remove a matching buffer from tracker.
14477 *
14478 * PARAMETERS : @buffer: image buffer for the callback
14479 *
14480 * RETURN : None
14481 *
14482 *==========================================================================*/
14483void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14484{
14485 bool buffer_found = false;
14486 for (auto req = mPendingBuffersInRequest.begin();
14487 req != mPendingBuffersInRequest.end(); req++) {
14488 for (auto k = req->mPendingBufferList.begin();
14489 k != req->mPendingBufferList.end(); k++ ) {
14490 if (k->buffer == buffer) {
14491 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14492 req->frame_number, buffer);
14493 k = req->mPendingBufferList.erase(k);
14494 if (req->mPendingBufferList.empty()) {
14495 // Remove this request from Map
14496 req = mPendingBuffersInRequest.erase(req);
14497 }
14498 buffer_found = true;
14499 break;
14500 }
14501 }
14502 if (buffer_found) {
14503 break;
14504 }
14505 }
14506 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14507 get_num_overall_buffers());
14508}
14509
14510/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014511 * FUNCTION : getBufErrStatus
14512 *
14513 * DESCRIPTION: get buffer error status
14514 *
14515 * PARAMETERS : @buffer: buffer handle
14516 *
14517 * RETURN : Error status
14518 *
14519 *==========================================================================*/
14520int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14521{
14522 for (auto& req : mPendingBuffersInRequest) {
14523 for (auto& k : req.mPendingBufferList) {
14524 if (k.buffer == buffer)
14525 return k.bufStatus;
14526 }
14527 }
14528 return CAMERA3_BUFFER_STATUS_OK;
14529}
14530
14531/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014532 * FUNCTION : setPAAFSupport
14533 *
14534 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14535 * feature mask according to stream type and filter
14536 * arrangement
14537 *
14538 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14539 * @stream_type: stream type
14540 * @filter_arrangement: filter arrangement
14541 *
14542 * RETURN : None
14543 *==========================================================================*/
14544void QCamera3HardwareInterface::setPAAFSupport(
14545 cam_feature_mask_t& feature_mask,
14546 cam_stream_type_t stream_type,
14547 cam_color_filter_arrangement_t filter_arrangement)
14548{
Thierry Strudel3d639192016-09-09 11:52:26 -070014549 switch (filter_arrangement) {
14550 case CAM_FILTER_ARRANGEMENT_RGGB:
14551 case CAM_FILTER_ARRANGEMENT_GRBG:
14552 case CAM_FILTER_ARRANGEMENT_GBRG:
14553 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014554 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14555 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014556 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014557 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14558 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014559 }
14560 break;
14561 case CAM_FILTER_ARRANGEMENT_Y:
14562 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14563 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14564 }
14565 break;
14566 default:
14567 break;
14568 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014569 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14570 feature_mask, stream_type, filter_arrangement);
14571
14572
Thierry Strudel3d639192016-09-09 11:52:26 -070014573}
14574
14575/*===========================================================================
14576* FUNCTION : getSensorMountAngle
14577*
14578* DESCRIPTION: Retrieve sensor mount angle
14579*
14580* PARAMETERS : None
14581*
14582* RETURN : sensor mount angle in uint32_t
14583*==========================================================================*/
14584uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14585{
14586 return gCamCapability[mCameraId]->sensor_mount_angle;
14587}
14588
14589/*===========================================================================
14590* FUNCTION : getRelatedCalibrationData
14591*
14592* DESCRIPTION: Retrieve related system calibration data
14593*
14594* PARAMETERS : None
14595*
14596* RETURN : Pointer of related system calibration data
14597*==========================================================================*/
14598const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14599{
14600 return (const cam_related_system_calibration_data_t *)
14601 &(gCamCapability[mCameraId]->related_cam_calibration);
14602}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014603
14604/*===========================================================================
14605 * FUNCTION : is60HzZone
14606 *
14607 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14608 *
14609 * PARAMETERS : None
14610 *
14611 * RETURN : True if in 60Hz zone, False otherwise
14612 *==========================================================================*/
14613bool QCamera3HardwareInterface::is60HzZone()
14614{
14615 time_t t = time(NULL);
14616 struct tm lt;
14617
14618 struct tm* r = localtime_r(&t, &lt);
14619
14620 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14621 return true;
14622 else
14623 return false;
14624}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014625
14626/*===========================================================================
14627 * FUNCTION : adjustBlackLevelForCFA
14628 *
14629 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14630 * of the Bayer CFA (Color Filter Array).
14631 *
14632 * PARAMETERS : @input: black level pattern in the order of RGGB
14633 * @output: black level pattern in the order of CFA
14634 * @color_arrangement: CFA color arrangement
14635 *
14636 * RETURN : None
14637 *==========================================================================*/
14638template<typename T>
14639void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14640 T input[BLACK_LEVEL_PATTERN_CNT],
14641 T output[BLACK_LEVEL_PATTERN_CNT],
14642 cam_color_filter_arrangement_t color_arrangement)
14643{
14644 switch (color_arrangement) {
14645 case CAM_FILTER_ARRANGEMENT_GRBG:
14646 output[0] = input[1];
14647 output[1] = input[0];
14648 output[2] = input[3];
14649 output[3] = input[2];
14650 break;
14651 case CAM_FILTER_ARRANGEMENT_GBRG:
14652 output[0] = input[2];
14653 output[1] = input[3];
14654 output[2] = input[0];
14655 output[3] = input[1];
14656 break;
14657 case CAM_FILTER_ARRANGEMENT_BGGR:
14658 output[0] = input[3];
14659 output[1] = input[2];
14660 output[2] = input[1];
14661 output[3] = input[0];
14662 break;
14663 case CAM_FILTER_ARRANGEMENT_RGGB:
14664 output[0] = input[0];
14665 output[1] = input[1];
14666 output[2] = input[2];
14667 output[3] = input[3];
14668 break;
14669 default:
14670 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14671 break;
14672 }
14673}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014674
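/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Update the result metadata of an HDR+ capture with the JPEG
 *              and capture-intent settings of the original still capture
 *              request (GPS tags, orientation, quality, thumbnail settings),
 *              since the result metadata belongs to a ZSL buffer.
 *
 * PARAMETERS : @resultMetadata: result metadata to update
 *              @settings: HAL metadata of the HDR+ still capture request
 *
 * RETURN     : None
 *==========================================================================*/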
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014675void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14676 CameraMetadata &resultMetadata,
14677 std::shared_ptr<metadata_buffer_t> settings)
14678{
14679 if (settings == nullptr) {
14680 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14681 return;
14682 }
14683
14684 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14685 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14686 }
14687
14688 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14689 String8 str((const char *)gps_methods);
14690 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14691 }
14692
14693 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14694 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14695 }
14696
14697 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14698 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14699 }
14700
14701 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14702 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14703 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14704 }
14705
14706 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14707 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14708 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14709 }
14710
14711 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14712 int32_t fwk_thumb_size[2];
14713 fwk_thumb_size[0] = thumb_size->width;
14714 fwk_thumb_size[1] = thumb_size->height;
14715 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14716 }
14717
14718 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14719 uint8_t fwk_intent = intent[0];
14720 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14721 }
14722}
14723
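/*===========================================================================
 * FUNCTION   : isRequestHdrPlusCompatible
 *
 * DESCRIPTION: Check whether a capture request can be served by HDR+. The
 *              request must use high-quality noise reduction, edge, aberration
 *              and tonemap modes, auto AE/AWB, no effect, ZSL enabled, flash
 *              off, no digital zoom, and a single JPEG output buffer.
 *
 * PARAMETERS : @request: framework capture request
 *              @metadata: request settings
 *
 * RETURN     : True if the request is HDR+ compatible, false otherwise
 *==========================================================================*/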
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014724bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14725 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014726 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14727 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14728 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014729 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014730 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014731 return false;
14732 }
14733
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014734 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014735 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14736 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014737 return false;
14738 }
14739
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014740 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14741 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14742 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14743 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14744 return false;
14745 }
14746
14747 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14748 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14749 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14750 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14751 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14752 return false;
14753 }
14754
14755 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14756 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14757 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14758 return false;
14759 }
14760
14761 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14762 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14763 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14764 ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
14765 return false;
14766 }
14767
14768 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14769 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14770 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14771 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14772 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14773 return false;
14774 }
14775
14776 // TODO (b/32585046): support non-ZSL.
14777 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14778 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14779 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14780 return false;
14781 }
14782
14783 // TODO (b/32586081): support flash.
14784 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14785 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14786 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14787 return false;
14788 }
14789
14790 // TODO (b/36492953): support digital zoom.
14791 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14792 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14793 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14794 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14795 gCamCapability[mCameraId]->active_array_size.width ||
14796 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14797 gCamCapability[mCameraId]->active_array_size.height) {
14798 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14799 __FUNCTION__);
14800 return false;
14801 }
14802
14803 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14804 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14805 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14806 return false;
14807 }
14808
14809 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014810 if (request.num_output_buffers != 1 ||
14811 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014812 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014813 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014814 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014815 request.output_buffers[i].stream->width,
14816 request.output_buffers[i].stream->height,
14817 request.output_buffers[i].stream->format);
14818 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014819 return false;
14820 }
14821
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014822 return true;
14823}
14824
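/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Try to submit an HDR+ capture request: verify the request is
 *              HDR+ compatible, get a YUV buffer from the pic channel, and
 *              submit a capture request to the HDR+ service. The caller is
 *              expected to hold the HDR+ client lock (per the Locked suffix).
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to fill on success
 *              @request: framework capture request
 *              @metadata: request settings
 *
 * RETURN     : True if the request was submitted to the HDR+ service
 *==========================================================================*/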
14825bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14826 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14827 const CameraMetadata &metadata)
14828{
14829 if (hdrPlusRequest == nullptr) return false;
14830 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14831
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014832 // Get a YUV buffer from pic channel.
14833 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14834 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14835 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14836 if (res != OK) {
14837 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14838 __FUNCTION__, strerror(-res), res);
14839 return false;
14840 }
14841
14842 pbcamera::StreamBuffer buffer;
14843 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014844 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014845 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014846 buffer.dataSize = yuvBuffer->frame_len;
14847
14848 pbcamera::CaptureRequest pbRequest;
14849 pbRequest.id = request.frame_number;
14850 pbRequest.outputBuffers.push_back(buffer);
14851
14852 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014853 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014854 if (res != OK) {
14855 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14856 strerror(-res), res);
14857 return false;
14858 }
14859
14860 hdrPlusRequest->yuvBuffer = yuvBuffer;
14861 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14862
14863 return true;
14864}
14865
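/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Ask the Easel manager client to open an HDR+ client
 *              asynchronously, unless one is already open or being opened.
 *              onOpened()/onOpenFailed() is invoked when the open completes.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/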
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014866status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14867{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014868 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14869 return OK;
14870 }
14871
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014872 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014873 if (res != OK) {
14874 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14875 strerror(-res), res);
14876 return res;
14877 }
14878 gHdrPlusClientOpening = true;
14879
14880 return OK;
14881}
14882
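/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable ZSL HDR+ mode. If the HDR+ client is not open yet, this
 *              only kicks off an asynchronous open and HDR+ mode is enabled
 *              later in onOpened(). Otherwise it configures the HDR+ streams
 *              and enables ZSL HDR+ mode so Easel starts capturing ZSL RAW
 *              buffers.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/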
Chien-Yu Chenee335912017-02-09 17:53:20 -080014883status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14884{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014885 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014886
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014887 if (mHdrPlusModeEnabled) {
14888 return OK;
14889 }
14890
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014891 // Check if gHdrPlusClient is opened or being opened.
14892 if (gHdrPlusClient == nullptr) {
14893 if (gHdrPlusClientOpening) {
14894 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14895 return OK;
14896 }
14897
14898 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014899 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014900 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14901 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014902 return res;
14903 }
14904
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014905 // When opening HDR+ client completes, HDR+ mode will be enabled.
14906 return OK;
14907
Chien-Yu Chenee335912017-02-09 17:53:20 -080014908 }
14909
14910 // Configure stream for HDR+.
14911 res = configureHdrPlusStreamsLocked();
14912 if (res != OK) {
14913 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014914 return res;
14915 }
14916
14917 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14918 res = gHdrPlusClient->setZslHdrPlusMode(true);
14919 if (res != OK) {
14920 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014921 return res;
14922 }
14923
14924 mHdrPlusModeEnabled = true;
14925 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14926
14927 return OK;
14928}
14929
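/*===========================================================================
 * FUNCTION   : finishHdrPlusClientOpeningLocked
 *
 * DESCRIPTION: Block until an in-progress asynchronous HDR+ client open
 *              completes, waiting on gHdrPlusClientOpenCond.
 *
 * PARAMETERS : @lock: unique_lock holding gHdrPlusClientLock
 *
 * RETURN     : None
 *==========================================================================*/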
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014930void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14931{
14932 if (gHdrPlusClientOpening) {
14933 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14934 }
14935}
14936
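/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable ZSL HDR+ mode and close the HDR+ client so Easel can
 *              enter low power mode.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/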
Chien-Yu Chenee335912017-02-09 17:53:20 -080014937void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14938{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014939 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014940 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014941 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14942 if (res != OK) {
14943 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14944 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014945
14946 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014947 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014948 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014949 }
14950
14951 mHdrPlusModeEnabled = false;
14952 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14953}
14954
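/*===========================================================================
 * FUNCTION   : isSessionHdrPlusModeCompatible
 *
 * DESCRIPTION: Check whether the current stream configuration supports HDR+.
 *              Currently this requires a valid picture channel; YUV and RAW
 *              outputs are not supported yet.
 *
 * PARAMETERS : None
 *
 * RETURN     : True if the session is HDR+ compatible, false otherwise
 *==========================================================================*/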
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014955bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14956{
14957 // Check if mPictureChannel is valid.
14958 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14959 if (mPictureChannel == nullptr) {
14960 return false;
14961 }
14962
14963 return true;
14964}
14965
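/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure HDR+ client streams: build the input configuration
 *              (RAW10 buffers from the HDR+ RAW source channel if present,
 *              otherwise sensor MIPI input described by the current sensor
 *              mode) and the YUV output configuration from the picture
 *              channel, then pass them to the HDR+ client.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/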
Chien-Yu Chenee335912017-02-09 17:53:20 -080014966status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014967{
14968 pbcamera::InputConfiguration inputConfig;
14969 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14970 status_t res = OK;
14971
14972 // Configure HDR+ client streams.
14973 // Get input config.
14974 if (mHdrPlusRawSrcChannel) {
14975 // HDR+ input buffers will be provided by HAL.
14976 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14977 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14978 if (res != OK) {
14979 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14980 __FUNCTION__, strerror(-res), res);
14981 return res;
14982 }
14983
14984 inputConfig.isSensorInput = false;
14985 } else {
14986 // Sensor MIPI will send data to Easel.
14987 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014988 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014989 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14990 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14991 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14992 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14993 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014994 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014995 if (mSensorModeInfo.num_raw_bits != 10) {
14996 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14997 mSensorModeInfo.num_raw_bits);
14998 return BAD_VALUE;
14999 }
15000
15001 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015002 }
15003
15004 // Get output configurations.
15005 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080015006 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015007
15008 // Easel may need to output YUV output buffers if mPictureChannel was created.
15009 pbcamera::StreamConfiguration yuvOutputConfig;
15010 if (mPictureChannel != nullptr) {
15011 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
15012 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
15013 if (res != OK) {
15014 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15015 __FUNCTION__, strerror(-res), res);
15016
15017 return res;
15018 }
15019
15020 outputStreamConfigs.push_back(yuvOutputConfig);
15021 }
15022
15023 // TODO: consider other channels for YUV output buffers.
15024
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015025 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015026 if (res != OK) {
15027 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15028 strerror(-res), res);
15029 return res;
15030 }
15031
15032 return OK;
15033}
15034
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015035void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15036{
15037 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15038 // Set HAL state to error.
15039 pthread_mutex_lock(&mMutex);
15040 mState = ERROR;
15041 pthread_mutex_unlock(&mMutex);
15042
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015043 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015044}
15045
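/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: Callback invoked when the HDR+ client has been opened
 *              asynchronously. Stores the client, pushes the static metadata
 *              to it, and enables HDR+ mode.
 *
 * PARAMETERS : @client: the opened HDR+ client
 *
 * RETURN     : None
 *==========================================================================*/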
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015046void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15047{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015048 if (client == nullptr) {
15049 ALOGE("%s: Opened client is null.", __FUNCTION__);
15050 return;
15051 }
15052
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015053 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015054 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15055
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015056 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015057 if (!gHdrPlusClientOpening) {
15058 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15059 return;
15060 }
15061
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015062 gHdrPlusClient = std::move(client);
15063 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015064 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015065
15066 // Set static metadata.
15067 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15068 if (res != OK) {
15069 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15070 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015071 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015072 gHdrPlusClient = nullptr;
15073 return;
15074 }
15075
15076 // Enable HDR+ mode.
15077 res = enableHdrPlusModeLocked();
15078 if (res != OK) {
15079 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15080 }
15081}
15082
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015083void QCamera3HardwareInterface::onOpenFailed(status_t err)
15084{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015085 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015086 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015087 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015088 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015089}
15090
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015091void QCamera3HardwareInterface::onFatalError()
15092{
15093 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15094
15095 // Set HAL state to error.
15096 pthread_mutex_lock(&mMutex);
15097 mState = ERROR;
15098 pthread_mutex_unlock(&mMutex);
15099
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015100 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015101}
15102
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015103void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15104{
15105 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15106 __LINE__, requestId, apSensorTimestampNs);
15107
15108 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15109}
15110
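/*===========================================================================
 * FUNCTION   : onNextCaptureReady
 *
 * DESCRIPTION: Callback invoked when the HDR+ service is ready to accept the
 *              next still capture request. Sends a partial result with
 *              NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY to the
 *              framework.
 *
 * PARAMETERS : @requestId: frame number of the HDR+ request
 *
 * RETURN     : None
 *==========================================================================*/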
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015111void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15112{
15113 pthread_mutex_lock(&mMutex);
15114
15115 // Find the pending request for this result metadata.
15116 auto requestIter = mPendingRequestsList.begin();
15117 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15118 requestIter++;
15119 }
15120
15121 if (requestIter == mPendingRequestsList.end()) {
15122 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15123 pthread_mutex_unlock(&mMutex);
15124 return;
15125 }
15126
15127 requestIter->partial_result_cnt++;
15128
15129 CameraMetadata metadata;
15130 uint8_t ready = true;
15131 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15132
15133 // Send it to framework.
15134 camera3_capture_result_t result = {};
15135
15136 result.result = metadata.getAndLock();
15137 // Populate metadata result
15138 result.frame_number = requestId;
15139 result.num_output_buffers = 0;
15140 result.output_buffers = NULL;
15141 result.partial_result = requestIter->partial_result_cnt;
15142
15143 orchestrateResult(&result);
15144 metadata.unlock(result.result);
15145
15146 pthread_mutex_unlock(&mMutex);
15147}
15148
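/*===========================================================================
 * FUNCTION   : onPostview
 *
 * DESCRIPTION: Callback invoked when an HDR+ postview image is ready.
 *              Optionally dumps the postview to a ppm file, then sends it to
 *              the framework as a partial result containing the postview
 *              config and data.
 *
 * PARAMETERS : @requestId: frame number of the HDR+ request
 *              @postview: postview image data
 *              @width, @height, @stride: postview dimensions
 *              @format: postview pixel format
 *
 * RETURN     : None
 *==========================================================================*/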
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015149void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15150 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15151 uint32_t stride, int32_t format)
15152{
15153 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15154 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15155 __LINE__, width, height, requestId);
15156 char buf[FILENAME_MAX] = {};
15157 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15158 requestId, width, height);
15159
15160 pbcamera::StreamConfiguration config = {};
15161 config.image.width = width;
15162 config.image.height = height;
15163 config.image.format = format;
15164
15165 pbcamera::PlaneConfiguration plane = {};
15166 plane.stride = stride;
15167 plane.scanline = height;
15168
15169 config.image.planes.push_back(plane);
15170
15171 pbcamera::StreamBuffer buffer = {};
15172 buffer.streamId = 0;
15173 buffer.dmaBufFd = -1;
15174 buffer.data = postview->data();
15175 buffer.dataSize = postview->size();
15176
15177 hdrplus_client_utils::writePpm(buf, config, buffer);
15178 }
15179
15180 pthread_mutex_lock(&mMutex);
15181
15182 // Find the pending request for this result metadata.
15183 auto requestIter = mPendingRequestsList.begin();
15184 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15185 requestIter++;
15186 }
15187
15188 if (requestIter == mPendingRequestsList.end()) {
15189 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15190 pthread_mutex_unlock(&mMutex);
15191 return;
15192 }
15193
15194 requestIter->partial_result_cnt++;
15195
15196 CameraMetadata metadata;
15197 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15198 static_cast<int32_t>(stride)};
15199 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15200 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15201
15202 // Send it to framework.
15203 camera3_capture_result_t result = {};
15204
15205 result.result = metadata.getAndLock();
15206 // Populate metadata result
15207 result.frame_number = requestId;
15208 result.num_output_buffers = 0;
15209 result.output_buffers = NULL;
15210 result.partial_result = requestIter->partial_result_cnt;
15211
15212 orchestrateResult(&result);
15213 metadata.unlock(result.result);
15214
15215 pthread_mutex_unlock(&mMutex);
15216}
15217
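/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: Callback invoked when an HDR+ capture result is ready. Updates
 *              the result metadata with the original request settings, returns
 *              the YUV buffer to the pic channel for JPEG encoding, sends the
 *              result metadata to the framework, and removes the pending HDR+
 *              request.
 *
 * PARAMETERS : @result: HDR+ capture result
 *              @resultMetadata: result metadata of the capture
 *
 * RETURN     : None
 *==========================================================================*/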
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015218void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015219 const camera_metadata_t &resultMetadata)
15220{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015221 if (result != nullptr) {
15222 if (result->outputBuffers.size() != 1) {
15223 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15224 result->outputBuffers.size());
15225 return;
15226 }
15227
15228 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15229 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15230 result->outputBuffers[0].streamId);
15231 return;
15232 }
15233
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015234 // TODO (b/34854987): initiate this from HDR+ service.
15235 onNextCaptureReady(result->requestId);
15236
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015237 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015238 HdrPlusPendingRequest pendingRequest;
15239 {
15240 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15241 auto req = mHdrPlusPendingRequests.find(result->requestId);
15242 pendingRequest = req->second;
15243 }
15244
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015245 // Update the result metadata with the settings of the HDR+ still capture request because
15246 // the result metadata belongs to a ZSL buffer.
15247 CameraMetadata metadata;
15248 metadata = &resultMetadata;
15249 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15250 camera_metadata_t* updatedResultMetadata = metadata.release();
15251
15252 QCamera3PicChannel *picChannel =
15253 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15254
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015255 // Check if dumping HDR+ YUV output is enabled.
15256 char prop[PROPERTY_VALUE_MAX];
15257 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15258 bool dumpYuvOutput = atoi(prop);
15259
15260 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015261 // Dump yuv buffer to a ppm file.
15262 pbcamera::StreamConfiguration outputConfig;
15263 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15264 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15265 if (rc == OK) {
15266 char buf[FILENAME_MAX] = {};
15267 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15268 result->requestId, result->outputBuffers[0].streamId,
15269 outputConfig.image.width, outputConfig.image.height);
15270
15271 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15272 } else {
15273 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15274 __FUNCTION__, strerror(-rc), rc);
15275 }
15276 }
15277
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015278 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15279 auto halMetadata = std::make_shared<metadata_buffer_t>();
15280 clear_metadata_buffer(halMetadata.get());
15281
15282 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15283 // encoding.
15284 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15285 halStreamId, /*minFrameDuration*/0);
15286 if (res == OK) {
15287 // Return the buffer to pic channel for encoding.
15288 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15289 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15290 halMetadata);
15291 } else {
15292 // Return the buffer without encoding.
15293 // TODO: This should not happen but we may want to report an error buffer to camera
15294 // service.
15295 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15296 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15297 strerror(-res), res);
15298 }
15299
15300 // Send HDR+ metadata to framework.
15301 {
15302 pthread_mutex_lock(&mMutex);
15303
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015304 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15305 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015306 pthread_mutex_unlock(&mMutex);
15307 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015308
15309 // Remove the HDR+ pending request.
15310 {
15311 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15312 auto req = mHdrPlusPendingRequests.find(result->requestId);
15313 mHdrPlusPendingRequests.erase(req);
15314 }
15315 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015316}
15317
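/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: Callback invoked when an HDR+ capture failed. Returns the YUV
 *              buffer to the pic channel and notifies the framework with
 *              buffer errors for all pending buffers of the failed request.
 *
 * PARAMETERS : @failedResult: the failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/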
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015318void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15319{
15320 if (failedResult == nullptr) {
15321 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15322 return;
15323 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015324
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015325 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015326
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015327 // Remove the pending HDR+ request.
15328 {
15329 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15330 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15331
15332 // Return the buffer to pic channel.
15333 QCamera3PicChannel *picChannel =
15334 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15335 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15336
15337 mHdrPlusPendingRequests.erase(pendingRequest);
15338 }
15339
15340 pthread_mutex_lock(&mMutex);
15341
15342 // Find the pending buffers.
15343 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15344 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15345 if (pendingBuffers->frame_number == failedResult->requestId) {
15346 break;
15347 }
15348 pendingBuffers++;
15349 }
15350
15351 // Send out buffer errors for the pending buffers.
15352 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15353 std::vector<camera3_stream_buffer_t> streamBuffers;
15354 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15355 // Prepare a stream buffer.
15356 camera3_stream_buffer_t streamBuffer = {};
15357 streamBuffer.stream = buffer.stream;
15358 streamBuffer.buffer = buffer.buffer;
15359 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15360 streamBuffer.acquire_fence = -1;
15361 streamBuffer.release_fence = -1;
15362
15363 streamBuffers.push_back(streamBuffer);
15364
15365 // Send out error buffer event.
15366 camera3_notify_msg_t notify_msg = {};
15367 notify_msg.type = CAMERA3_MSG_ERROR;
15368 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15369 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15370 notify_msg.message.error.error_stream = buffer.stream;
15371
15372 orchestrateNotify(&notify_msg);
15373 }
15374
15375 camera3_capture_result_t result = {};
15376 result.frame_number = pendingBuffers->frame_number;
15377 result.num_output_buffers = streamBuffers.size();
15378 result.output_buffers = &streamBuffers[0];
15379
15380 // Send out result with buffer errors.
15381 orchestrateResult(&result);
15382
15383 // Remove pending buffers.
15384 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15385 }
15386
15387 // Remove pending request.
15388 auto halRequest = mPendingRequestsList.begin();
15389 while (halRequest != mPendingRequestsList.end()) {
15390 if (halRequest->frame_number == failedResult->requestId) {
15391 mPendingRequestsList.erase(halRequest);
15392 break;
15393 }
15394 halRequest++;
15395 }
15396
15397 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015398}
15399
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015400
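/*===========================================================================
 * CLASS      : ShutterDispatcher
 *
 * DESCRIPTION: Dispatches shutter notifications to the framework in frame
 *              number order. A shutter is registered with expectShutter(),
 *              marked ready with markShutterReady(), and only sent once all
 *              earlier expected shutters have been sent.
 *
 *              Illustrative usage (simplified sketch, not verbatim HAL code):
 *
 *                  ShutterDispatcher dispatcher(this);
 *                  dispatcher.expectShutter(frameNumber, /*isReprocess*/false);
 *                  // Later, when the sensor timestamp arrives:
 *                  dispatcher.markShutterReady(frameNumber, timestampNs);
 *                  // On flush or error:
 *                  dispatcher.clear();
 *==========================================================================*/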
15401ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15402 mParent(parent) {}
15403
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015404void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015405{
15406 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015407
15408 if (isReprocess) {
15409 mReprocessShutters.emplace(frameNumber, Shutter());
15410 } else {
15411 mShutters.emplace(frameNumber, Shutter());
15412 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015413}
15414
15415void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15416{
15417 std::lock_guard<std::mutex> lock(mLock);
15418
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015419 std::map<uint32_t, Shutter> *shutters = nullptr;
15420
15421 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015422 auto shutter = mShutters.find(frameNumber);
15423 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015424 shutter = mReprocessShutters.find(frameNumber);
15425 if (shutter == mReprocessShutters.end()) {
15426 // Shutter was already sent.
15427 return;
15428 }
15429 shutters = &mReprocessShutters;
15430 } else {
15431 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015432 }
15433
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015434 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015435 shutter->second.ready = true;
15436 shutter->second.timestamp = timestamp;
15437
15438 // Iterate through the shutters and send them out, stopping at the first one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015439 shutter = shutters->begin();
15440 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015441 if (!shutter->second.ready) {
15442 // If this shutter is not ready, the following shutters can't be sent.
15443 break;
15444 }
15445
15446 camera3_notify_msg_t msg = {};
15447 msg.type = CAMERA3_MSG_SHUTTER;
15448 msg.message.shutter.frame_number = shutter->first;
15449 msg.message.shutter.timestamp = shutter->second.timestamp;
15450 mParent->orchestrateNotify(&msg);
15451
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015452 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015453 }
15454}
15455
15456void ShutterDispatcher::clear(uint32_t frameNumber)
15457{
15458 std::lock_guard<std::mutex> lock(mLock);
15459 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015460 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015461}
15462
15463void ShutterDispatcher::clear()
15464{
15465 std::lock_guard<std::mutex> lock(mLock);
15466
15467 // Log errors for stale shutters.
15468 for (auto &shutter : mShutters) {
15469 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15470 __FUNCTION__, shutter.first, shutter.second.ready,
15471 shutter.second.timestamp);
15472 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015473
15474 // Log errors for stale reprocess shutters.
15475 for (auto &shutter : mReprocessShutters) {
15476 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15477 __FUNCTION__, shutter.first, shutter.second.ready,
15478 shutter.second.timestamp);
15479 }
15480
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015481 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015482 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015483}
15484
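/*===========================================================================
 * CLASS      : OutputBufferDispatcher
 *
 * DESCRIPTION: Dispatches output buffers of each stream to the framework in
 *              frame number order. Streams are registered at configuration
 *              time, each request registers an expected buffer, and a buffer
 *              is only sent once all earlier expected buffers of the same
 *              stream have been sent.
 *
 *              Illustrative usage (simplified sketch, not verbatim HAL code):
 *
 *                  OutputBufferDispatcher dispatcher(this);
 *                  dispatcher.configureStreams(streamList);
 *                  dispatcher.expectBuffer(frameNumber, stream);
 *                  // Later, when the buffer is filled (or failed):
 *                  dispatcher.markBufferReady(frameNumber, streamBuffer);
 *==========================================================================*/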
15485OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15486 mParent(parent) {}
15487
15488status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15489{
15490 std::lock_guard<std::mutex> lock(mLock);
15491 mStreamBuffers.clear();
15492 if (!streamList) {
15493 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15494 return -EINVAL;
15495 }
15496
15497 // Create a "frame-number -> buffer" map for each stream.
15498 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15499 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15500 }
15501
15502 return OK;
15503}
15504
15505status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15506{
15507 std::lock_guard<std::mutex> lock(mLock);
15508
15509 // Find the "frame-number -> buffer" map for the stream.
15510 auto buffers = mStreamBuffers.find(stream);
15511 if (buffers == mStreamBuffers.end()) {
15512 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15513 return -EINVAL;
15514 }
15515
15516 // Create an unready buffer for this frame number.
15517 buffers->second.emplace(frameNumber, Buffer());
15518 return OK;
15519}
15520
15521void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15522 const camera3_stream_buffer_t &buffer)
15523{
15524 std::lock_guard<std::mutex> lock(mLock);
15525
15526 // Find the frame number -> buffer map for the stream.
15527 auto buffers = mStreamBuffers.find(buffer.stream);
15528 if (buffers == mStreamBuffers.end()) {
15529 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15530 return;
15531 }
15532
15533 // Find the unready buffer for this frame number and mark it ready.
15534 auto pendingBuffer = buffers->second.find(frameNumber);
15535 if (pendingBuffer == buffers->second.end()) {
15536 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15537 return;
15538 }
15539
15540 pendingBuffer->second.ready = true;
15541 pendingBuffer->second.buffer = buffer;
15542
15543 // Iterate through the buffers and send them out, stopping at the first one that is not ready yet.
15544 pendingBuffer = buffers->second.begin();
15545 while (pendingBuffer != buffers->second.end()) {
15546 if (!pendingBuffer->second.ready) {
15547 // If this buffer is not ready, the following buffers can't be sent.
15548 break;
15549 }
15550
15551 camera3_capture_result_t result = {};
15552 result.frame_number = pendingBuffer->first;
15553 result.num_output_buffers = 1;
15554 result.output_buffers = &pendingBuffer->second.buffer;
15555
15556 // Send out a result containing this output buffer.
15557 mParent->orchestrateResult(&result);
15558
15559 pendingBuffer = buffers->second.erase(pendingBuffer);
15560 }
15561}
15562
15563void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15564{
15565 std::lock_guard<std::mutex> lock(mLock);
15566
15567 // Log errors for stale buffers.
15568 for (auto &buffers : mStreamBuffers) {
15569 for (auto &buffer : buffers.second) {
15570 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15571 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15572 }
15573 buffers.second.clear();
15574 }
15575
15576 if (clearConfiguredStreams) {
15577 mStreamBuffers.clear();
15578 }
15579}
15580
Thierry Strudel3d639192016-09-09 11:52:26 -070015581}; //end namespace qcamera