/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
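// Illustrative usage: METADATA_MAP_SIZE yields the number of entries in a statically
// sized lookup table at compile time, e.g.
//     size_t numEffects = METADATA_MAP_SIZE(EFFECT_MODES_MAP);
// The sizeof trick only works on true arrays, not on pointers they have decayed to.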

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6
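// Illustrative layout: each detected face carries a 6-entry landmark array indexed with
// the defines above, e.g.
//     int32_t landmarks[TOTAL_LANDMARK_INDICES];
//     landmarks[LEFT_EYE_X]  = leftEye.x;   landmarks[LEFT_EYE_Y]  = leftEye.y;
//     landmarks[RIGHT_EYE_X] = rightEye.x;  landmarks[RIGHT_EYE_Y] = rightEye.y;
//     landmarks[MOUTH_X]     = mouth.x;     landmarks[MOUTH_Y]     = mouth.y;
// (leftEye/rightEye/mouth are hypothetical locals; the ordering is meant to mirror
// ANDROID_STATISTICS_FACE_LANDMARKS.)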

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP[] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP[] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the traversal selects the first match found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
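// Illustrative sketch of the HAL-to-Android traversal described in the comment above:
// scanning REFERENCE_ILLUMINANT_MAP from index 0 upward and taking the first entry whose
// HAL value matches means a HAL value that appears more than once (e.g. CAM_AWB_D50)
// always resolves to the earliest Android enum listed. (Field names below assume the
// fwk_name/hal_name convention of QCameraMap; the real lookup helper may differ.)
//     for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//         if (REFERENCE_ILLUMINANT_MAP[i].hal_name == halIlluminant) {
//             fwkIlluminant = REFERENCE_ILLUMINANT_MAP[i].fwk_name;
//             break;
//         }
//     }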

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
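// These static entry points form the camera3_device_ops vtable that the camera framework
// calls into; the constructor installs it on mCameraDevice.ops below.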

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
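// Example usage (as seen in openCamera() below): when gEaselProfilingEnabled is set,
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// logs the current CLOCK_BOOTTIME timestamp in milliseconds for the named event.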
437
Thierry Strudel3d639192016-09-09 11:52:26 -0700438/*===========================================================================
439 * FUNCTION : QCamera3HardwareInterface
440 *
441 * DESCRIPTION: constructor of QCamera3HardwareInterface
442 *
443 * PARAMETERS :
444 * @cameraId : camera ID
445 *
446 * RETURN : none
447 *==========================================================================*/
448QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
449 const camera_module_callbacks_t *callbacks)
450 : mCameraId(cameraId),
451 mCameraHandle(NULL),
452 mCameraInitialized(false),
453 mCallbackOps(NULL),
454 mMetadataChannel(NULL),
455 mPictureChannel(NULL),
456 mRawChannel(NULL),
457 mSupportChannel(NULL),
458 mAnalysisChannel(NULL),
459 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700460 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700461 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800462 mDepthChannel(NULL),
Emilian Peev656e4fa2017-06-02 16:47:04 +0100463 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800464 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700465 mChannelHandle(0),
466 mFirstConfiguration(true),
467 mFlush(false),
468 mFlushPerf(false),
469 mParamHeap(NULL),
470 mParameters(NULL),
471 mPrevParameters(NULL),
472 m_bIsVideo(false),
473 m_bIs4KVideo(false),
474 m_bEisSupportedSize(false),
475 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800476 m_bEis3PropertyEnabled(false),
Binhao Lin09245482017-08-31 18:25:29 -0700477 m_bAVTimerEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700478 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700479 mShutterDispatcher(this),
480 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700481 mMinProcessedFrameDuration(0),
482 mMinJpegFrameDuration(0),
483 mMinRawFrameDuration(0),
Emilian Peev30522a12017-08-03 14:36:33 +0100484 mExpectedFrameDuration(0),
485 mExpectedInflightDuration(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700486 mMetaFrameCount(0U),
487 mUpdateDebugLevel(false),
488 mCallbacks(callbacks),
489 mCaptureIntent(0),
490 mCacMode(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800491 /* DevCamDebug metadata internal m control*/
492 mDevCamDebugMetaEnable(0),
493 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700494 mBatchSize(0),
495 mToBeQueuedVidBufs(0),
496 mHFRVideoFps(DEFAULT_VIDEO_FPS),
497 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800498 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800499 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700500 mFirstFrameNumberInBatch(0),
501 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800502 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700503 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
504 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000505 mPDSupported(false),
506 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700507 mInstantAEC(false),
508 mResetInstantAEC(false),
509 mInstantAECSettledFrameNumber(0),
510 mAecSkipDisplayFrameBound(0),
511 mInstantAecFrameIdxCount(0),
Chien-Yu Chenbc730232017-07-12 14:49:55 -0700512 mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -0700513 mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800514 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700515 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700516 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700517 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700518 mState(CLOSED),
519 mIsDeviceLinked(false),
520 mIsMainCamera(true),
521 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700522 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800523 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800524 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700525 mZslEnabled(false),
Chien-Yu Chen153c5172017-09-08 11:33:19 -0700526 mEaselMipiStarted(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800527 mIsApInputUsedForHdrPlus(false),
528 mFirstPreviewIntentSeen(false),
Shuzhen Wang181c57b2017-07-21 11:39:44 -0700529 m_bSensorHDREnabled(false),
Shuzhen Wang3569d4a2017-09-04 19:10:28 -0700530 mAfTrigger(),
531 mSceneDistance(-1)
Thierry Strudel3d639192016-09-09 11:52:26 -0700532{
533 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700534 mCommon.init(gCamCapability[cameraId]);
535 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700536#ifndef USE_HAL_3_3
537 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
538#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700539 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700540#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700541 mCameraDevice.common.close = close_camera_device;
542 mCameraDevice.ops = &mCameraOps;
543 mCameraDevice.priv = this;
544 gCamCapability[cameraId]->version = CAM_HAL_V3;
545 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
546 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
547 gCamCapability[cameraId]->min_num_pp_bufs = 3;
548
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800549 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700550
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800551 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700552 mPendingLiveRequest = 0;
553 mCurrentRequestId = -1;
554 pthread_mutex_init(&mMutex, NULL);
555
556 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
557 mDefaultMetadata[i] = NULL;
558
559 // Getting system props of different kinds
560 char prop[PROPERTY_VALUE_MAX];
561 memset(prop, 0, sizeof(prop));
562 property_get("persist.camera.raw.dump", prop, "0");
563 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800564 property_get("persist.camera.hal3.force.hdr", prop, "0");
565 mForceHdrSnapshot = atoi(prop);
566
Thierry Strudel3d639192016-09-09 11:52:26 -0700567 if (mEnableRawDump)
568 LOGD("Raw dump from Camera HAL enabled");
569
570 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
571 memset(mLdafCalib, 0, sizeof(mLdafCalib));
572
Arnd Geis082a4d72017-08-24 10:33:07 -0700573 memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
Arnd Geis8cbfc182017-09-07 14:46:41 -0700574 mEaselFwUpdated = false;
Arnd Geis082a4d72017-08-24 10:33:07 -0700575
Thierry Strudel3d639192016-09-09 11:52:26 -0700576 memset(prop, 0, sizeof(prop));
577 property_get("persist.camera.tnr.preview", prop, "0");
578 m_bTnrPreview = (uint8_t)atoi(prop);
579
580 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800581 property_get("persist.camera.swtnr.preview", prop, "1");
582 m_bSwTnrPreview = (uint8_t)atoi(prop);
583
584 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700585 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700586 m_bTnrVideo = (uint8_t)atoi(prop);
587
588 memset(prop, 0, sizeof(prop));
589 property_get("persist.camera.avtimer.debug", prop, "0");
590 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800591 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700592
Thierry Strudel54dc9782017-02-15 12:12:10 -0800593 memset(prop, 0, sizeof(prop));
594 property_get("persist.camera.cacmode.disable", prop, "0");
595 m_cacModeDisabled = (uint8_t)atoi(prop);
596
Shuzhen Wangb57ec912017-07-31 13:24:27 -0700597 m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
Shuzhen Wang8c276ef2017-08-09 11:12:20 -0700598 m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);
Shuzhen Wangb57ec912017-07-31 13:24:27 -0700599
Thierry Strudel3d639192016-09-09 11:52:26 -0700600 //Load and read GPU library.
601 lib_surface_utils = NULL;
602 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700603 mSurfaceStridePadding = CAM_PAD_TO_64;
604#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700605 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
606 if (lib_surface_utils) {
607 *(void **)&LINK_get_surface_pixel_alignment =
608 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
609 if (LINK_get_surface_pixel_alignment) {
610 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
611 }
612 dlclose(lib_surface_utils);
613 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700614#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000615 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
616 mPDSupported = (0 <= mPDIndex) ? true : false;
617
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700618 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700619}
620
621/*===========================================================================
622 * FUNCTION : ~QCamera3HardwareInterface
623 *
624 * DESCRIPTION: destructor of QCamera3HardwareInterface
625 *
626 * PARAMETERS : none
627 *
628 * RETURN : none
629 *==========================================================================*/
630QCamera3HardwareInterface::~QCamera3HardwareInterface()
631{
632 LOGD("E");
633
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800634 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700635
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800636 // Disable power hint and enable the perf lock for close camera
637 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
638 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
639
Chien-Yu Chen153c5172017-09-08 11:33:19 -0700640 // Close HDR+ client first before destroying HAL.
641 {
642 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
643 finishHdrPlusClientOpeningLocked(l);
644 if (gHdrPlusClient != nullptr) {
645 // Disable HDR+ mode.
646 disableHdrPlusModeLocked();
647 // Disconnect Easel if it's connected.
648 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
649 gHdrPlusClient = nullptr;
650 }
651 }
652
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800653 // unlink of dualcam during close camera
654 if (mIsDeviceLinked) {
655 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
656 &m_pDualCamCmdPtr->bundle_info;
657 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
658 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
659 pthread_mutex_lock(&gCamLock);
660
661 if (mIsMainCamera == 1) {
662 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
663 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
664 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
665 // related session id should be session id of linked session
666 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
667 } else {
668 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
669 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
670 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
671 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
672 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800673 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800674 pthread_mutex_unlock(&gCamLock);
675
676 rc = mCameraHandle->ops->set_dual_cam_cmd(
677 mCameraHandle->camera_handle);
678 if (rc < 0) {
679 LOGE("Dualcam: Unlink failed, but still proceed to close");
680 }
681 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700682
683 /* We need to stop all streams before deleting any stream */
684 if (mRawDumpChannel) {
685 mRawDumpChannel->stop();
686 }
687
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700688 if (mHdrPlusRawSrcChannel) {
689 mHdrPlusRawSrcChannel->stop();
690 }
691
Thierry Strudel3d639192016-09-09 11:52:26 -0700692 // NOTE: 'camera3_stream_t *' objects are already freed at
693 // this stage by the framework
694 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
695 it != mStreamInfo.end(); it++) {
696 QCamera3ProcessingChannel *channel = (*it)->channel;
697 if (channel) {
698 channel->stop();
699 }
700 }
701 if (mSupportChannel)
702 mSupportChannel->stop();
703
704 if (mAnalysisChannel) {
705 mAnalysisChannel->stop();
706 }
707 if (mMetadataChannel) {
708 mMetadataChannel->stop();
709 }
710 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -0700711 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -0700712 }
713
714 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
715 it != mStreamInfo.end(); it++) {
716 QCamera3ProcessingChannel *channel = (*it)->channel;
717 if (channel)
718 delete channel;
719 free (*it);
720 }
721 if (mSupportChannel) {
722 delete mSupportChannel;
723 mSupportChannel = NULL;
724 }
725
726 if (mAnalysisChannel) {
727 delete mAnalysisChannel;
728 mAnalysisChannel = NULL;
729 }
730 if (mRawDumpChannel) {
731 delete mRawDumpChannel;
732 mRawDumpChannel = NULL;
733 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700734 if (mHdrPlusRawSrcChannel) {
735 delete mHdrPlusRawSrcChannel;
736 mHdrPlusRawSrcChannel = NULL;
737 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700738 if (mDummyBatchChannel) {
739 delete mDummyBatchChannel;
740 mDummyBatchChannel = NULL;
741 }
742
743 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800744 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700745
746 if (mMetadataChannel) {
747 delete mMetadataChannel;
748 mMetadataChannel = NULL;
749 }
750
751 /* Clean up all channels */
752 if (mCameraInitialized) {
753 if(!mFirstConfiguration){
754 //send the last unconfigure
755 cam_stream_size_info_t stream_config_info;
756 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
757 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
758 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800759 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -0700760 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700761 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700762 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
763 stream_config_info);
764 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
765 if (rc < 0) {
766 LOGE("set_parms failed for unconfigure");
767 }
768 }
769 deinitParameters();
770 }
771
772 if (mChannelHandle) {
773 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
774 mChannelHandle);
775 LOGH("deleting channel %d", mChannelHandle);
776 mChannelHandle = 0;
777 }
778
779 if (mState != CLOSED)
780 closeCamera();
781
782 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
783 req.mPendingBufferList.clear();
784 }
785 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700786 for (pendingRequestIterator i = mPendingRequestsList.begin();
787 i != mPendingRequestsList.end();) {
788 i = erasePendingRequest(i);
789 }
790 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
791 if (mDefaultMetadata[i])
792 free_camera_metadata(mDefaultMetadata[i]);
793
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800794 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700795
796 pthread_cond_destroy(&mRequestCond);
797
798 pthread_cond_destroy(&mBuffersCond);
799
800 pthread_mutex_destroy(&mMutex);
801 LOGD("X");
802}
803
804/*===========================================================================
805 * FUNCTION : erasePendingRequest
806 *
807 * DESCRIPTION: function to erase a desired pending request after freeing any
808 * allocated memory
809 *
810 * PARAMETERS :
811 * @i : iterator pointing to pending request to be erased
812 *
813 * RETURN : iterator pointing to the next request
814 *==========================================================================*/
815QCamera3HardwareInterface::pendingRequestIterator
816 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
817{
818 if (i->input_buffer != NULL) {
819 free(i->input_buffer);
820 i->input_buffer = NULL;
821 }
822 if (i->settings != NULL)
823 free_camera_metadata((camera_metadata_t*)i->settings);
Emilian Peev30522a12017-08-03 14:36:33 +0100824
825 mExpectedInflightDuration -= i->expectedFrameDuration;
826 if (mExpectedInflightDuration < 0) {
827 LOGE("Negative expected in-flight duration!");
828 mExpectedInflightDuration = 0;
829 }
830
Thierry Strudel3d639192016-09-09 11:52:26 -0700831 return mPendingRequestsList.erase(i);
832}
833
834/*===========================================================================
835 * FUNCTION : camEvtHandle
836 *
837 * DESCRIPTION: Function registered to mm-camera-interface to handle events
838 *
839 * PARAMETERS :
840 * @camera_handle : interface layer camera handle
841 * @evt : ptr to event
842 * @user_data : user data ptr
843 *
844 * RETURN : none
845 *==========================================================================*/
846void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
847 mm_camera_event_t *evt,
848 void *user_data)
849{
850 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
851 if (obj && evt) {
852 switch(evt->server_event_type) {
853 case CAM_EVENT_TYPE_DAEMON_DIED:
854 pthread_mutex_lock(&obj->mMutex);
855 obj->mState = ERROR;
856 pthread_mutex_unlock(&obj->mMutex);
857 LOGE("Fatal, camera daemon died");
858 break;
859
860 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
861 LOGD("HAL got request pull from Daemon");
862 pthread_mutex_lock(&obj->mMutex);
863 obj->mWokenUpByDaemon = true;
864 obj->unblockRequestIfNecessary();
865 pthread_mutex_unlock(&obj->mMutex);
866 break;
867
868 default:
869 LOGW("Warning: Unhandled event %d",
870 evt->server_event_type);
871 break;
872 }
873 } else {
874 LOGE("NULL user_data/evt");
875 }
876}
877
878/*===========================================================================
879 * FUNCTION : openCamera
880 *
881 * DESCRIPTION: open camera
882 *
883 * PARAMETERS :
884 * @hw_device : double ptr for camera device struct
885 *
886 * RETURN : int32_t type of status
887 * NO_ERROR -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                        mPDSupported) {
                    if ((depthWidth == newStream->width) &&
                            (depthHeight == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                        mPDSupported) {
                    //As per spec. depth cloud should be sample count / 16
                    uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                    if ((depthSamplesCount == newStream->width) &&
                            (1 == newStream->height)) {
                        sizeFound = true;
                    }
                    break;
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
1309 /* We could potentially break here to enforce that a ZSL stream
1310 * set by the framework is always full active array size,
1311 * but it is not clear from the spec whether the framework will
1312 * always follow that. We also have logic to override to full
1313 * array size, so keep the check lenient for now.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
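
// Illustrative note on the rotation handling in validateStreamDimensions()
// (example values assumed): a 1080x1920 output stream requested with
// CAMERA3_STREAM_ROTATION_90 is validated as 1920x1080 against the size
// tables, since the swap above undoes the requested rotation before the
// table lookup.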
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
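
// Example of a configuration the check above rejects (a sketch with an assumed
// stream setup, not an exhaustive rule): if a single IMPLEMENTATION_DEFINED
// output stream carries both GRALLOC_USAGE_HW_VIDEO_ENCODER and preview/texture
// usage bits, and getStreamDefaultFormat() resolves the video case to a UBWC
// format while the preview case resolves to a different (non-UBWC) format, the
// shared buffer cannot satisfy both consumers and -EINVAL is returned.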
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
1432 // Because EIS is "hard-coded" for certain use cases, and the current
1433 // implementation doesn't support shared preview and video on the same
1434 // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
1446 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
1452 * RETURN : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
1461 /* Check for conditions where the PProc pipeline does not have any streams */
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473 /* Dummy stream needed if only raw or jpeg streams present */
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
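
// Illustrative outcomes of the heuristic above (typical postprocess masks
// assumed):
// - A RAW16-only or BLOB(JPEG)-only configuration returns true, so a dummy
//   support stream keeps the PProc pipeline populated.
// - A configuration that also contains a preview or YUV callback stream
//   (which normally carries a non-NONE postprocess mask) returns false,
//   since PProc already has a real stream to work on.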
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 * non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
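
// Minimal usage sketch for the query above (error handling elided; assumes
// mParameters and the camera handle are already initialized):
//
//     cam_sensor_mode_info_t modeInfo;
//     memset(&modeInfo, 0, sizeof(modeInfo));
//     if (getSensorModeInfo(modeInfo) == NO_ERROR) {
//         LOGH("pixel array %dx%d, op_pixel_clk %u",
//                 modeInfo.pixel_array_size.width,
//                 modeInfo.pixel_array_size.height,
//                 modeInfo.op_pixel_clk);
//     }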
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
1558 * non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
1571 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and use case
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : None
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
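
// The property parsed above accepts either a hex value with a "0x" prefix or
// a plain decimal value. Illustrative only (the value below is an assumption,
// not a documented constant):
//
//     adb shell setprop persist.camera.hal3.feature 0x100
//
// would be read through the sscanf("0x%llx") path and then tested against
// bits such as CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD before they
// are OR'd into the stream's postprocess mask.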
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not a VT use case
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN : int32_t type of status (NO_ERROR on success, non-zero on failure)
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
1774 * non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812 // Disable HDR+ if it's enabled;
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 /* First invalidate all the streams in the mStreamList;
1820 * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
1849 /* If content of mStreamInfo is not 0, there is metadata stream */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
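    // Illustrative only: EIS can be disabled from the shell for debugging with
    //     adb shell setprop persist.camera.eis.enable 0
    // in which case m_bEisEnable remains false regardless of m_bEisSupported.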
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2021 newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
2055 // to SUPERSET so that it supports concurrent requests on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with the following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
2218 //if the stream is in the mStreamList validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen3d836272017-09-20 11:10:21 -07002246 stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
2267 /* This scenario indicates that multiple YUV streams with the same size
2268 * as the input stream have been requested. Since the zsl stream handle
2269 * is solely used to override the size of streams that share h/w streams,
2270 * we will just make a guess here as to which of the streams is the ZSL
2271 * stream. This will be refactored once we have generic logic for streams
2272 * sharing encoder output.
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
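    // The code above treats persist.camera.gzoom.at as a bitmask: bit 0
    // requests Google zoom on the video stream, bit 1 on the preview stream
    // (back camera only). Illustrative only:
    //     adb shell setprop persist.camera.gzoom.at 3
    // requests it on both, with the video path additionally gated by
    // persist.camera.gzoom.4k for 4K sessions.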
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364 // There could be more than one preview stream, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
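    // Example of the rule above (stream sizes assumed): a 1920x1080 video
    // stream with TNR enabled plus a 1920x1080 preview stream running EIS 2.0
    // (or on the front camera) marks that preview index in previewTnr[], so
    // the preview channel below also gets CAM_QCOM_FEATURE_CPP_TNR.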
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
2432 //for non zsl streams find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //R/W for in-place algorithms
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596 /* For the video encoder stream, set the read/write-rarely
2597 * flags so that the buffers may be allocated as uncached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605 LOGD("ZSL usage flag skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here, so use
2645 //MAX_INFLIGHT_HFR_REQUESTS instead
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651 /* Copy the stream contents in the HFR preview-only case to create
2652 * a dummy batch channel, so that the sensor keeps streaming in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
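/* Buffer count selection for the regular channel (sketch of the logic below):
 * default is MAX_INFLIGHT_REQUESTS; for video streams with the EIS 3.0 property
 * enabled it is reduced to MAX_30FPS_VIDEO_BUFFERS for 4K (capped at 30fps) or
 * MAX_VIDEO_BUFFERS otherwise. */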
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
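/* Two UBWC decisions follow: preview UBWC is dropped when preview and video have
 * identical dimensions and video UBWC is disabled, so the CPP output can simply
 * be duplicated; UBWC is also disabled when CAM_QCOM_FEATURE_GOOG_ZOOM is linked
 * to this stream, presumably because that processing path expects linear buffers
 * (assumption, not confirmed here). */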
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692 * disable UBWC for the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822 /* Do not add entries for the input & depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 * since there is no real stream associated with them
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 /* In HFR mode, if no video stream is added, create a dummy channel so that
3010 * the ISP can run in batch mode even for the preview-only case. This channel is
3011 * never 'start'ed (no stream-on); it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028 LOGE("creation of mDummyBatchChannel failed."
3029 "Preview will use non-hfr sensor mode ");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
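/* buffer_info advertised to the backend: max_buffers is 0 for 4K video (which
 * appears to defer to the backend default; assumption), MAX_VIDEO_BUFFERS when
 * EIS 3.0 video is active, and MAX_INFLIGHT_REQUESTS otherwise. */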
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067 //Get min frame duration for this streams configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN :
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("Request %d: No output buffers provided!",
3116 __FUNCTION__, frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
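/* Illustrative example (hypothetical sizes and capability entries, not actual
 * values): with a 1920x1080 preview and a 4000x3000 JPEG stream and no RAW stream,
 * maxProcessedDim becomes 12000000 and mMinRawFrameDuration stays 0; if the
 * matching picture_sizes_tbl entry reports picture_min_duration = 33333333 ns,
 * both mMinProcessedFrameDuration and mMinJpegFrameDuration become ~33.3 ms. */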
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3272 * frame durations and the current request configuration.
3273 *
3274 * PARAMETERS : @request: request sent by the framework
3275 *
3276 * RETURN : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
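/* Example (values assumed for illustration): with mMinProcessedFrameDuration =
 * 33.3 ms, mMinRawFrameDuration = 50 ms and mMinJpegFrameDuration = 100 ms, a
 * request with only processed/RAW buffers returns 50 ms, while a request that
 * also includes a BLOB (JPEG) buffer returns 100 ms. */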
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
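// Once every buffer pending at flush has been accounted for, signal mBuffersCond
// below so that flush(), which is expected to be waiting on it, can proceed.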
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360 /* In batch mode, the metadata will contain the frame number and timestamp of
3361 * the last frame in the batch. E.g.: a batch containing buffers from requests
3362 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3363 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401 /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
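/* Example (illustrative): for a batch of four requests 5..8 the backend reports
 * last_frame_number = 8; with first_frame_number = 5 from mPendingBatchMap,
 * frameNumDiff = 8 + 1 - 5 = 4, so the loop below replays the metadata for frames
 * 5, 6, 7 and 8, spacing the inferred timestamps by NSEC_PER_SEC / mHFRVideoFps
 * and ending at the reported capture time. */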
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508 &is_metabuf_queued /* whether the metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
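/* Ownership note: if handleMetadataWithLock queued this metadata buffer for
 * internal reprocessing (is_metabuf_queued set to true), the consuming channel is
 * responsible for the buf-done/free, so the buffer must not be released here. */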
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 * which the partial result is being sen
3542 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
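/* If the sensor timestamp is not calibrated against the monotonic clock, it is
 * treated as being on the BOOTTIME base (assumption implied by the conversion
 * below): the BOOTTIME-to-MONOTONIC offset is sampled three times, the sample
 * with the tightest bracketing window wins, and it is subtracted from
 * capture_time. */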
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709 //For the very first metadata callback, regardless of whether it contains a valid
3710 //frame number, send the partial metadata for the jumpstarting requests.
3711 //Note that this has to be done even if the metadata doesn't contain a valid
3712 //urgent frame number, because when only 1 request is ever submitted to the
3713 //HAL, there won't be a subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729 //Received an urgent frame number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003737 (i->partial_result_cnt == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003740 i->partialResultDropped = true;
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003741 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003742 }
3743
3744 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003745 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003746 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3747 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003748 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3749 // Instant AEC settled for this frame.
3750 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3751 mInstantAECSettledFrameNumber = urgent_frame_number;
3752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003753 break;
3754 }
3755 }
3756 }
3757
3758 if (!frame_number_valid) {
3759 LOGD("Not a valid normal frame number, used as SOF only");
3760 if (free_and_bufdone_meta_buf) {
3761 mMetadataChannel->bufDone(metadata_buf);
3762 free(metadata_buf);
3763 }
3764 goto done_metadata;
3765 }
3766 LOGH("valid frame_number = %u, capture_time = %lld",
3767 frame_number, capture_time);
3768
Emilian Peev4e0fe952017-06-30 12:40:09 -07003769 handleDepthDataLocked(metadata->depth_data, frame_number,
3770 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003771
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 // Check whether any stream buffer corresponding to this frame is dropped or not.
3773 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3774 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3775 for (auto & pendingRequest : mPendingRequestsList) {
3776 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3777 mInstantAECSettledFrameNumber)) {
3778 camera3_notify_msg_t notify_msg = {};
3779 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 QCamera3ProcessingChannel *channel =
3782 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003783 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003784 if (p_cam_frame_drop) {
3785 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003786 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003787 // Got the stream ID for drop frame.
3788 dropFrame = true;
3789 break;
3790 }
3791 }
3792 } else {
3793 // This is instant AEC case.
3794 // For instant AEC, drop the stream until AEC is settled.
3795 dropFrame = true;
3796 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 if (dropFrame) {
3799 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3800 if (p_cam_frame_drop) {
3801 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003802 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 } else {
3805 // For instant AEC, inform frame drop and frame number
3806 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3807 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 pendingRequest.frame_number, streamID,
3809 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003810 }
3811 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003813 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003815 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003816 if (p_cam_frame_drop) {
3817 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003818 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003820 } else {
3821 // For instant AEC, inform frame drop and frame number
3822 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3823 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003824 pendingRequest.frame_number, streamID,
3825 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003826 }
3827 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003829 PendingFrameDrop.stream_ID = streamID;
3830 // Add the Frame drop info to mPendingFrameDropList
3831 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003832 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 }
3834 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003836
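    // Walk the pending requests: for the request matching this frame number, record the
    // sensor timestamp, queue reprocess metadata where internal offline postprocessing
    // needs it, and translate the HAL metadata into the framework result metadata.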
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003837 for (auto & pendingRequest : mPendingRequestsList) {
3838 // Find the pending request with the frame number.
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003839 if (pendingRequest.frame_number < frame_number) {
3840 // Workaround for case where shutter is missing due to dropped
3841 // metadata
Emilian Peev7b0175d2017-09-29 12:57:31 +01003842 if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
Chien-Yu Chen0469c9b2017-09-22 13:22:19 -07003843 mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
3844 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003845 } else if (pendingRequest.frame_number == frame_number) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003846 // Update the sensor timestamp.
3847 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003848
Thierry Strudel3d639192016-09-09 11:52:26 -07003849
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003850 /* Set the timestamp in the display metadata so that clients aware of
3851 private_handle, such as VT, can use this unmodified timestamp.
3852 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003853 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003854
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 // Find channel requiring metadata, meaning internal offline postprocess
3856 // is needed.
3857 //TODO: for now, we don't support two streams requiring metadata at the same time
3858 // (because we are not making copies, and the metadata buffer is not reference counted).
3859 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003860 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3861 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003862 if (iter->need_metadata) {
3863 internalPproc = true;
3864 QCamera3ProcessingChannel *channel =
3865 (QCamera3ProcessingChannel *)iter->stream->priv;
3866 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003867 if(p_is_metabuf_queued != NULL) {
3868 *p_is_metabuf_queued = true;
3869 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003870 break;
3871 }
3872 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 for (auto itr = pendingRequest.internalRequestList.begin();
3874 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003875 if (itr->need_metadata) {
3876 internalPproc = true;
3877 QCamera3ProcessingChannel *channel =
3878 (QCamera3ProcessingChannel *)itr->stream->priv;
3879 channel->queueReprocMetadata(metadata_buf);
3880 break;
3881 }
3882 }
3883
Thierry Strudel54dc9782017-02-15 12:12:10 -08003884 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003885
3886 bool *enableZsl = nullptr;
3887 if (gExposeEnableZslKey) {
3888 enableZsl = &pendingRequest.enableZsl;
3889 }
3890
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003891 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003892 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003893 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003895 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003896
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003897 if (pendingRequest.blob_request) {
3898 //Dump tuning metadata if enabled and available
3899 char prop[PROPERTY_VALUE_MAX];
3900 memset(prop, 0, sizeof(prop));
3901 property_get("persist.camera.dumpmetadata", prop, "0");
3902 int32_t enabled = atoi(prop);
3903 if (enabled && metadata->is_tuning_params_valid) {
3904 dumpMetadataToFile(metadata->tuning_params,
3905 mMetaFrameCount,
3906 enabled,
3907 "Snapshot",
3908 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003909 }
3910 }
3911
3912 if (!internalPproc) {
3913 LOGD("couldn't find need_metadata for this metadata");
3914 // Return metadata buffer
3915 if (free_and_bufdone_meta_buf) {
3916 mMetadataChannel->bufDone(metadata_buf);
3917 free(metadata_buf);
3918 }
3919 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003920
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003921 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003922 }
3923 }
3924
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003925 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3926
3927 // Try to send out capture result metadata.
3928 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003929 return;
3930
Thierry Strudel3d639192016-09-09 11:52:26 -07003931done_metadata:
3932 for (pendingRequestIterator i = mPendingRequestsList.begin();
3933 i != mPendingRequestsList.end() ;i++) {
3934 i->pipeline_depth++;
3935 }
3936 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3937 unblockRequestIfNecessary();
3938}
3939
3940/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003941 * FUNCTION : handleDepthDataLocked
3942 *
3943 * DESCRIPTION: Handles incoming depth data
3944 *
3945 * PARAMETERS : @depthData : Depth data
3946 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003947 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003948 *
3949 * RETURN :
3950 *
3951 *==========================================================================*/
3952void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003953 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003954 uint32_t currentFrameNumber;
3955 buffer_handle_t *depthBuffer;
3956
3957 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003958 return;
3959 }
3960
3961 camera3_stream_buffer_t resultBuffer =
3962 {.acquire_fence = -1,
3963 .release_fence = -1,
3964 .status = CAMERA3_BUFFER_STATUS_OK,
3965 .buffer = nullptr,
3966 .stream = mDepthChannel->getStream()};
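    // Drain the depth channel in frame-number order up to the incoming frame:
    // the matching frame is populated with the depth data (or flagged as an
    // error if the data is invalid), older frames with no depth data are
    // reported as buffer errors, and newer frames are left queued.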
Emilian Peev7650c122017-01-19 08:24:33 -08003967 do {
3968 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3969 if (nullptr == depthBuffer) {
3970 break;
3971 }
3972
Emilian Peev7650c122017-01-19 08:24:33 -08003973 resultBuffer.buffer = depthBuffer;
3974 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003975 if (valid) {
3976 int32_t rc = mDepthChannel->populateDepthData(depthData,
3977 frameNumber);
3978 if (NO_ERROR != rc) {
3979 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3980 } else {
3981 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3982 }
Emilian Peev7650c122017-01-19 08:24:33 -08003983 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003984 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003985 }
3986 } else if (currentFrameNumber > frameNumber) {
3987 break;
3988 } else {
3989 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3990 {{currentFrameNumber, mDepthChannel->getStream(),
3991 CAMERA3_MSG_ERROR_BUFFER}}};
3992 orchestrateNotify(&notify_msg);
3993
3994 LOGE("Depth buffer for frame number: %d is missing "
3995 "returning back!", currentFrameNumber);
3996 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3997 }
3998 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003999 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004000 } while (currentFrameNumber < frameNumber);
4001}
4002
4003/*===========================================================================
4004 * FUNCTION : notifyErrorFoPendingDepthData
4005 *
4006 * DESCRIPTION: Returns error for any pending depth buffers
4007 *
4008 * PARAMETERS : depthCh - depth channel that needs to get flushed
4009 *
4010 * RETURN :
4011 *
4012 *==========================================================================*/
4013void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4014 QCamera3DepthChannel *depthCh) {
4015 uint32_t currentFrameNumber;
4016 buffer_handle_t *depthBuffer;
4017
4018 if (nullptr == depthCh) {
4019 return;
4020 }
4021
4022 camera3_notify_msg_t notify_msg =
4023 {.type = CAMERA3_MSG_ERROR,
4024 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4025 camera3_stream_buffer_t resultBuffer =
4026 {.acquire_fence = -1,
4027 .release_fence = -1,
4028 .buffer = nullptr,
4029 .stream = depthCh->getStream(),
4030 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004031
4032 while (nullptr !=
4033 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4034 depthCh->unmapBuffer(currentFrameNumber);
4035
4036 notify_msg.message.error.frame_number = currentFrameNumber;
4037 orchestrateNotify(&notify_msg);
4038
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004039 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004040 };
4041}
4042
4043/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004044 * FUNCTION : hdrPlusPerfLock
4045 *
4046 * DESCRIPTION: perf lock for HDR+ using custom intent
4047 *
4048 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4049 *
4050 * RETURN : None
4051 *
4052 *==========================================================================*/
4053void QCamera3HardwareInterface::hdrPlusPerfLock(
4054 mm_camera_super_buf_t *metadata_buf)
4055{
4056 if (NULL == metadata_buf) {
4057 LOGE("metadata_buf is NULL");
4058 return;
4059 }
4060 metadata_buffer_t *metadata =
4061 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4062 int32_t *p_frame_number_valid =
4063 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4064 uint32_t *p_frame_number =
4065 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4066
4067 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4068 LOGE("%s: Invalid metadata", __func__);
4069 return;
4070 }
4071
Wei Wang01385482017-08-03 10:49:34 -07004072 //acquire perf lock for 2 secs after the last HDR frame is captured
4073 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4075 if ((p_frame_number != NULL) &&
4076 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004077 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004078 }
4079 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004080}
4081
4082/*===========================================================================
4083 * FUNCTION : handleInputBufferWithLock
4084 *
4085 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4086 *
4087 * PARAMETERS : @frame_number: frame number of the input buffer
4088 *
4089 * RETURN :
4090 *
4091 *==========================================================================*/
4092void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4093{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004094 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004095 pendingRequestIterator i = mPendingRequestsList.begin();
4096 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4097 i++;
4098 }
4099 if (i != mPendingRequestsList.end() && i->input_buffer) {
4100 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 CameraMetadata settings;
4102 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4103 if(i->settings) {
4104 settings = i->settings;
4105 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4106 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004107 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004108 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004109 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004110 } else {
4111 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004112 }
4113
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004114 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4115 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4116 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004117
4118 camera3_capture_result result;
4119 memset(&result, 0, sizeof(camera3_capture_result));
4120 result.frame_number = frame_number;
4121 result.result = i->settings;
4122 result.input_buffer = i->input_buffer;
4123 result.partial_result = PARTIAL_RESULT_COUNT;
4124
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004125 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 LOGD("Input request metadata and input buffer frame_number = %u",
4127 i->frame_number);
4128 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004129
4130 // Dispatch result metadata that may be just unblocked by this reprocess result.
4131 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004132 } else {
4133 LOGE("Could not find input request for frame number %d", frame_number);
4134 }
4135}
4136
4137/*===========================================================================
4138 * FUNCTION : handleBufferWithLock
4139 *
4140 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4141 *
4142 * PARAMETERS : @buffer: image buffer for the callback
4143 * @frame_number: frame number of the image buffer
4144 *
4145 * RETURN :
4146 *
4147 *==========================================================================*/
4148void QCamera3HardwareInterface::handleBufferWithLock(
4149 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4150{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004151 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004152
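    // A returned BLOB (JPEG) buffer means the snapshot is complete, so the
    // snapshot perf lock can be released.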
4153 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4154 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4155 }
4156
Thierry Strudel3d639192016-09-09 11:52:26 -07004157 /* Nothing to be done during error state */
4158 if ((ERROR == mState) || (DEINIT == mState)) {
4159 return;
4160 }
4161 if (mFlushPerf) {
4162 handleBuffersDuringFlushLock(buffer);
4163 return;
4164 }
4165 //not in flush
4166 // If the frame number doesn't exist in the pending request list,
4167 // directly send the buffer to the frameworks, and update pending buffers map
4168 // Otherwise, book-keep the buffer.
4169 pendingRequestIterator i = mPendingRequestsList.begin();
4170 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4171 i++;
4172 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004175 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004176 // For a reprocessing request, try to send out result metadata.
4177 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004179 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004180
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004181 // Check if this frame was dropped.
4182 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4183 m != mPendingFrameDropList.end(); m++) {
4184 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4185 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4186 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4187 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4188 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4189 frame_number, streamID);
4190 m = mPendingFrameDropList.erase(m);
4191 break;
4192 }
4193 }
4194
Binhao Lin09245482017-08-31 18:25:29 -07004195 // Workaround (WAR) for the encoder avtimer timestamp issue
4196 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4197 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4198 m_bAVTimerEnabled) {
4199 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4200 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4201 if (req->frame_number != frame_number)
4202 continue;
4203 if(req->av_timestamp == 0) {
4204 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4205 }
4206 else {
4207 struct private_handle_t *priv_handle =
4208 (struct private_handle_t *) (*(buffer->buffer));
4209 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4210 }
4211 }
4212 }
4213
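    // Merge any error status already recorded for this buffer before handing
    // it to the output buffer dispatcher.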
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004214 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4215 LOGH("result frame_number = %d, buffer = %p",
4216 frame_number, buffer->buffer);
4217
4218 mPendingBuffersMap.removeBuf(buffer->buffer);
4219 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4220
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004221 if (mPreviewStarted == false) {
4222 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4223 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004224 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4225
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004226 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4227 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4228 mPreviewStarted = true;
4229
4230 // Set power hint for preview
4231 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4232 }
4233 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004234}
4235
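/*===========================================================================
 * FUNCTION : removeUnrequestedMetadata
 *
 * DESCRIPTION: Strips the lens shading map and face detection results from
 * the result metadata when the request did not ask for them, and restores
 * the originally requested mode tags.
 *
 * PARAMETERS : @requestIter : Iterator to the pending request
 * @resultMetadata: Result metadata to be filtered; the filtered metadata is
 * stored back in the pending request
 *
 * RETURN :
 *
 *==========================================================================*/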
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07004236void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4237 camera_metadata_t *resultMetadata) {
4238 CameraMetadata metadata;
4239 metadata.acquire(resultMetadata);
4240
4241 // Remove len shading map if it's not requested.
4242 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4243 metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4244 metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF).data.u8[0] !=
4245 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4246 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4247 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4248 &requestIter->requestedLensShadingMapMode, 1);
4249 }
4250
4251 // Remove face information if it's not requested.
4252 if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4253 metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4254 metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4255 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4256 metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4257 metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4258 &requestIter->requestedFaceDetectMode, 1);
4259 }
4260
4261 requestIter->resultMetadata = metadata.release();
4262}
4263
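/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Attaches result metadata to the matching pending request,
 * finalizes the partial result count, forwards metadata of live requests to
 * the HDR+ client when enabled, strips unrequested metadata, and then tries
 * to dispatch results in order. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : Frame number of the result
 * @resultMetadata: Translated result metadata for this frame
 *
 * RETURN :
 *
 *==========================================================================*/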
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004264void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004265 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004266{
4267 // Find the pending request for this result metadata.
4268 auto requestIter = mPendingRequestsList.begin();
4269 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4270 requestIter++;
4271 }
4272
4273 if (requestIter == mPendingRequestsList.end()) {
4274 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4275 return;
4276 }
4277
4278 // Update the result metadata
4279 requestIter->resultMetadata = resultMetadata;
4280
4281 // Check what type of request this is.
4282 bool liveRequest = false;
4283 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004284 // HDR+ request doesn't have partial results.
4285 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004286 } else if (requestIter->input_buffer != nullptr) {
4287 // Reprocessing request result is the same as settings.
4288 requestIter->resultMetadata = requestIter->settings;
4289 // Reprocessing request doesn't have partial results.
4290 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4291 } else {
4292 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004293 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 mPendingLiveRequest--;
4295
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004296 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004297 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004298 // For a live request, send the metadata to HDR+ client.
4299 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4300 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4301 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4302 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004303 }
4304 }
4305
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07004306 if (requestIter->input_buffer == nullptr) {
4307 removeUnrequestedMetadata(requestIter, resultMetadata);
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004308 }
4309
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004310 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4311}
4312
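/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks the pending request list in frame-number order and sends
 * out every result whose metadata is ready, notifying ERROR_RESULT for
 * dropped partial results and for earlier live requests that never received
 * metadata. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : Frame number that triggered this dispatch
 * @isLiveRequest: Whether the triggering result belongs to a live request
 *
 * RETURN :
 *
 *==========================================================================*/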
4313void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4314 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004315 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4316 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004317 bool readyToSend = true;
4318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004319 // Iterate through the pending requests to send out result metadata that are ready. Also if
4320 // this result metadata belongs to a live request, notify errors for previous live requests
4321 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004322 auto iter = mPendingRequestsList.begin();
4323 while (iter != mPendingRequestsList.end()) {
4324 // Check if current pending request is ready. If it's not ready, the following pending
4325 // requests are also not ready.
4326 if (readyToSend && iter->resultMetadata == nullptr) {
4327 readyToSend = false;
4328 }
4329
4330 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004331 bool errorResult = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004332
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004333 camera3_capture_result_t result = {};
4334 result.frame_number = iter->frame_number;
4335 result.result = iter->resultMetadata;
4336 result.partial_result = iter->partial_result_cnt;
4337
4338 // If this pending buffer has result metadata, we may be able to send out shutter callback
4339 // and result metadata.
4340 if (iter->resultMetadata != nullptr) {
4341 if (!readyToSend) {
4342 // If any of the previous pending request is not ready, this pending request is
4343 // also not ready to send in order to keep shutter callbacks and result metadata
4344 // in order.
4345 iter++;
4346 continue;
4347 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004348 // Notify ERROR_RESULT if partial result was dropped.
4349 errorResult = iter->partialResultDropped;
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004350 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004351 // If the result metadata belongs to a live request, notify errors for previous pending
4352 // live requests.
4353 mPendingLiveRequest--;
4354
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004355 LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
4356 errorResult = true;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004357 } else {
4358 iter++;
4359 continue;
4360 }
4361
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004362 if (errorResult) {
4363 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
4364 } else {
4365 result.output_buffers = nullptr;
4366 result.num_output_buffers = 0;
4367 orchestrateResult(&result);
4368 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004369 // For reprocessing, result metadata is the same as settings so do not free it here to
4370 // avoid double free.
4371 if (result.result != iter->settings) {
4372 free_camera_metadata((camera_metadata_t *)result.result);
4373 }
4374 iter->resultMetadata = nullptr;
4375 iter = erasePendingRequest(iter);
4376 }
4377
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004378 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004379 for (auto &iter : mPendingRequestsList) {
4380 // Increment pipeline depth for the following pending requests.
4381 if (iter.frame_number > frameNumber) {
4382 iter.pipeline_depth++;
4383 }
4384 }
4385 }
4386
4387 unblockRequestIfNecessary();
4388}
4389
Thierry Strudel3d639192016-09-09 11:52:26 -07004390/*===========================================================================
4391 * FUNCTION : unblockRequestIfNecessary
4392 *
4393 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4394 * that mMutex is held when this function is called.
4395 *
4396 * PARAMETERS :
4397 *
4398 * RETURN :
4399 *
4400 *==========================================================================*/
4401void QCamera3HardwareInterface::unblockRequestIfNecessary()
4402{
4403 // Unblock process_capture_request
4404 pthread_cond_signal(&mRequestCond);
4405}
4406
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004407/*===========================================================================
4408 * FUNCTION : isHdrSnapshotRequest
4409 *
4410 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4411 *
4412 * PARAMETERS : camera3 request structure
4413 *
4414 * RETURN : boolean decision variable
4415 *
4416 *==========================================================================*/
4417bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4418{
4419 if (request == NULL) {
4420 LOGE("Invalid request handle");
4421 assert(0);
4422 return false;
4423 }
4424
4425 if (!mForceHdrSnapshot) {
4426 CameraMetadata frame_settings;
4427 frame_settings = request->settings;
4428
4429 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4430 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4431 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4432 return false;
4433 }
4434 } else {
4435 return false;
4436 }
4437
4438 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4439 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4440 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4441 return false;
4442 }
4443 } else {
4444 return false;
4445 }
4446 }
4447
4448 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4449 if (request->output_buffers[i].stream->format
4450 == HAL_PIXEL_FORMAT_BLOB) {
4451 return true;
4452 }
4453 }
4454
4455 return false;
4456}
4457/*===========================================================================
4458 * FUNCTION : orchestrateRequest
4459 *
4460 * DESCRIPTION: Orchestrates a capture request from camera service
4461 *
4462 * PARAMETERS :
4463 * @request : request from framework to process
4464 *
4465 * RETURN : Error status codes
4466 *
4467 *==========================================================================*/
4468int32_t QCamera3HardwareInterface::orchestrateRequest(
4469 camera3_capture_request_t *request)
4470{
4471
4472 uint32_t originalFrameNumber = request->frame_number;
4473 uint32_t originalOutputCount = request->num_output_buffers;
4474 const camera_metadata_t *original_settings = request->settings;
4475 List<InternalRequest> internallyRequestedStreams;
4476 List<InternalRequest> emptyInternalList;
4477
4478 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4479 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4480 uint32_t internalFrameNumber;
4481 CameraMetadata modified_meta;
4482
4483
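        /* HDR snapshot orchestration: the single framework request is expanded
         * into an exposure bracket. AE is locked and the exposure compensation
         * is stepped through GB_HDR_HALF_STEP_EV, 0 and GB_HDR_2X_STEP_EV;
         * metering-only settling requests on the internally requested blob
         * stream are interleaved with the actual captures, and the original
         * settings pointer is restored at the end. */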
4484 /* Add Blob channel to list of internally requested streams */
4485 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4486 if (request->output_buffers[i].stream->format
4487 == HAL_PIXEL_FORMAT_BLOB) {
4488 InternalRequest streamRequested;
4489 streamRequested.meteringOnly = 1;
4490 streamRequested.need_metadata = 0;
4491 streamRequested.stream = request->output_buffers[i].stream;
4492 internallyRequestedStreams.push_back(streamRequested);
4493 }
4494 }
4495 request->num_output_buffers = 0;
4496 auto itr = internallyRequestedStreams.begin();
4497
4498 /* Modify setting to set compensation */
4499 modified_meta = request->settings;
4500 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4501 uint8_t aeLock = 1;
4502 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4503 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4504 camera_metadata_t *modified_settings = modified_meta.release();
4505 request->settings = modified_settings;
4506
4507 /* Capture Settling & -2x frame */
4508 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4509 request->frame_number = internalFrameNumber;
4510 processCaptureRequest(request, internallyRequestedStreams);
4511
4512 request->num_output_buffers = originalOutputCount;
4513 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4514 request->frame_number = internalFrameNumber;
4515 processCaptureRequest(request, emptyInternalList);
4516 request->num_output_buffers = 0;
4517
4518 modified_meta = modified_settings;
4519 expCompensation = 0;
4520 aeLock = 1;
4521 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4522 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4523 modified_settings = modified_meta.release();
4524 request->settings = modified_settings;
4525
4526 /* Capture Settling & 0X frame */
4527
4528 itr = internallyRequestedStreams.begin();
4529 if (itr == internallyRequestedStreams.end()) {
4530 LOGE("Error Internally Requested Stream list is empty");
4531 assert(0);
4532 } else {
4533 itr->need_metadata = 0;
4534 itr->meteringOnly = 1;
4535 }
4536
4537 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4538 request->frame_number = internalFrameNumber;
4539 processCaptureRequest(request, internallyRequestedStreams);
4540
4541 itr = internallyRequestedStreams.begin();
4542 if (itr == internallyRequestedStreams.end()) {
4543 ALOGE("Error Internally Requested Stream list is empty");
4544 assert(0);
4545 } else {
4546 itr->need_metadata = 1;
4547 itr->meteringOnly = 0;
4548 }
4549
4550 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4551 request->frame_number = internalFrameNumber;
4552 processCaptureRequest(request, internallyRequestedStreams);
4553
4554 /* Capture 2X frame*/
4555 modified_meta = modified_settings;
4556 expCompensation = GB_HDR_2X_STEP_EV;
4557 aeLock = 1;
4558 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4559 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4560 modified_settings = modified_meta.release();
4561 request->settings = modified_settings;
4562
4563 itr = internallyRequestedStreams.begin();
4564 if (itr == internallyRequestedStreams.end()) {
4565 ALOGE("Error Internally Requested Stream list is empty");
4566 assert(0);
4567 } else {
4568 itr->need_metadata = 0;
4569 itr->meteringOnly = 1;
4570 }
4571 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4572 request->frame_number = internalFrameNumber;
4573 processCaptureRequest(request, internallyRequestedStreams);
4574
4575 itr = internallyRequestedStreams.begin();
4576 if (itr == internallyRequestedStreams.end()) {
4577 ALOGE("Error Internally Requested Stream list is empty");
4578 assert(0);
4579 } else {
4580 itr->need_metadata = 1;
4581 itr->meteringOnly = 0;
4582 }
4583
4584 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4585 request->frame_number = internalFrameNumber;
4586 processCaptureRequest(request, internallyRequestedStreams);
4587
4588
4589 /* Capture 2X on original streaming config*/
4590 internallyRequestedStreams.clear();
4591
4592 /* Restore original settings pointer */
4593 request->settings = original_settings;
4594 } else {
4595 uint32_t internalFrameNumber;
4596 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4597 request->frame_number = internalFrameNumber;
4598 return processCaptureRequest(request, internallyRequestedStreams);
4599 }
4600
4601 return NO_ERROR;
4602}
4603
4604/*===========================================================================
4605 * FUNCTION : orchestrateResult
4606 *
4607 * DESCRIPTION: Orchestrates a capture result to camera service
4608 *
4609 * PARAMETERS :
4610 * @result : capture result to be sent to the camera service
4611 *
4612 * RETURN :
4613 *
4614 *==========================================================================*/
4615void QCamera3HardwareInterface::orchestrateResult(
4616 camera3_capture_result_t *result)
4617{
4618 uint32_t frameworkFrameNumber;
4619 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4620 frameworkFrameNumber);
4621 if (rc != NO_ERROR) {
4622 LOGE("Cannot find translated frameworkFrameNumber");
4623 assert(0);
4624 } else {
4625 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004626 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004627 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004628 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004629 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4630 camera_metadata_entry_t entry;
4631 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4632 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004633 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004634 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4635 if (ret != OK)
4636 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004637 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004638 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 result->frame_number = frameworkFrameNumber;
4640 mCallbackOps->process_capture_result(mCallbackOps, result);
4641 }
4642 }
4643}
4644
4645/*===========================================================================
4646 * FUNCTION : orchestrateNotify
4647 *
4648 * DESCRIPTION: Orchestrates a notify to camera service
4649 *
4650 * PARAMETERS :
4651 * @request : request from framework to process
4652 *
4653 * RETURN :
4654 *
4655 *==========================================================================*/
4656void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4657{
4658 uint32_t frameworkFrameNumber;
4659 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004660 int32_t rc = NO_ERROR;
4661
4662 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004663 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004664
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004665 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004666 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4667 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4668 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004669 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004670 LOGE("Cannot find translated frameworkFrameNumber");
4671 assert(0);
4672 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004673 }
4674 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004675
4676 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4677 LOGD("Internal Request drop the notifyCb");
4678 } else {
4679 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4680 mCallbackOps->notify(mCallbackOps, notify_msg);
4681 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004682}
4683
4684/*===========================================================================
4685 * FUNCTION : FrameNumberRegistry
4686 *
4687 * DESCRIPTION: Constructor
4688 *
4689 * PARAMETERS :
4690 *
4691 * RETURN :
4692 *
4693 *==========================================================================*/
4694FrameNumberRegistry::FrameNumberRegistry()
4695{
4696 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4697}
4698
4699/*===========================================================================
4700 * FUNCTION : ~FrameNumberRegistry
4701 *
4702 * DESCRIPTION: Destructor
4703 *
4704 * PARAMETERS :
4705 *
4706 * RETURN :
4707 *
4708 *==========================================================================*/
4709FrameNumberRegistry::~FrameNumberRegistry()
4710{
4711}
4712
4713/*===========================================================================
4714 * FUNCTION : purgeOldEntriesLocked
4715 *
4716 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4717 *
4718 * PARAMETERS :
4719 *
4720 * RETURN : NONE
4721 *
4722 *==========================================================================*/
4723void FrameNumberRegistry::purgeOldEntriesLocked()
4724{
4725 while (_register.begin() != _register.end()) {
4726 auto itr = _register.begin();
4727 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4728 _register.erase(itr);
4729 } else {
4730 return;
4731 }
4732 }
4733}
4734
4735/*===========================================================================
4736 * FUNCTION : allocStoreInternalFrameNumber
4737 *
4738 * DESCRIPTION: Method to record a framework request and associate a newly
4739 * generated internal frame number with it
4740 *
4741 * PARAMETERS :
4742 * @fFrameNumber: Identifier given by framework
4743 * @internalFN : Output parameter which will hold the newly generated
4744 * internal frame number
4745 *
4746 * RETURN : Error code
4747 *
4748 *==========================================================================*/
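// Illustrative usage (see orchestrateRequest() above): the framework frame
// number is swapped for a freshly allocated internal one before the request
// is handed to processCaptureRequest(), e.g.
//     uint32_t internalFrameNumber;
//     _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
//     request->frame_number = internalFrameNumber;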
4749int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4750 uint32_t &internalFrameNumber)
4751{
4752 Mutex::Autolock lock(mRegistryLock);
4753 internalFrameNumber = _nextFreeInternalNumber++;
4754 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4755 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4756 purgeOldEntriesLocked();
4757 return NO_ERROR;
4758}
4759
4760/*===========================================================================
4761 * FUNCTION : generateStoreInternalFrameNumber
4762 *
4763 * DESCRIPTION: Method to associate a new internal request number independent
4764 * of any associate with framework requests
4765 *
4766 * PARAMETERS :
4767 * @internalFrame#: Output parameter which will have the newly generated internal
4768 *
4769 *
4770 * RETURN : Error code
4771 *
4772 *==========================================================================*/
4773int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4774{
4775 Mutex::Autolock lock(mRegistryLock);
4776 internalFrameNumber = _nextFreeInternalNumber++;
4777 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4778 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4779 purgeOldEntriesLocked();
4780 return NO_ERROR;
4781}
4782
4783/*===========================================================================
4784 * FUNCTION : getFrameworkFrameNumber
4785 *
4786 * DESCRIPTION: Method to query the framework framenumber given an internal #
4787 *
4788 * PARAMETERS :
4789 * @internalFrame#: Internal reference
4790 * @frameworkframenumber: Output parameter holding framework frame entry
4791 *
4792 * RETURN : Error code
4793 *
4794 *==========================================================================*/
4795int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4796 uint32_t &frameworkFrameNumber)
4797{
4798 Mutex::Autolock lock(mRegistryLock);
4799 auto itr = _register.find(internalFrameNumber);
4800 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004801 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004802 return -ENOENT;
4803 }
4804
4805 frameworkFrameNumber = itr->second;
4806 purgeOldEntriesLocked();
4807 return NO_ERROR;
4808}
Thierry Strudel3d639192016-09-09 11:52:26 -07004809
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004810status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004811 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4812 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004813 if (config == nullptr) {
4814 LOGE("%s: config is null", __FUNCTION__);
4815 return BAD_VALUE;
4816 }
4817
4818 if (channel == nullptr) {
4819 LOGE("%s: channel is null", __FUNCTION__);
4820 return BAD_VALUE;
4821 }
4822
4823 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4824 if (stream == nullptr) {
4825 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4826 return NAME_NOT_FOUND;
4827 }
4828
4829 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4830 if (streamInfo == nullptr) {
4831 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4832 return NAME_NOT_FOUND;
4833 }
4834
4835 config->id = pbStreamId;
4836 config->image.width = streamInfo->dim.width;
4837 config->image.height = streamInfo->dim.height;
4838 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004839
4840 int bytesPerPixel = 0;
4841
4842 switch (streamInfo->fmt) {
4843 case CAM_FORMAT_YUV_420_NV21:
4844 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4845 bytesPerPixel = 1;
4846 break;
4847 case CAM_FORMAT_YUV_420_NV12:
4848 case CAM_FORMAT_YUV_420_NV12_VENUS:
4849 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4850 bytesPerPixel = 1;
4851 break;
4852 default:
4853 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4854 return BAD_VALUE;
4855 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004856
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004857 uint32_t totalPlaneSize = 0;
4858
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004859 // Fill plane information.
4860 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4861 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004862 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004863 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4864 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004865
4866 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004867 }
4868
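    // Padding is whatever the backend allocated for the frame beyond the sum
    // of the per-plane (stride * scanline) sizes.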
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004869 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004870 return OK;
4871}
4872
Thierry Strudel3d639192016-09-09 11:52:26 -07004873/*===========================================================================
4874 * FUNCTION : processCaptureRequest
4875 *
4876 * DESCRIPTION: process a capture request from camera service
4877 *
4878 * PARAMETERS :
4879 * @request : request from framework to process
4880 *
4881 * RETURN :
4882 *
4883 *==========================================================================*/
4884int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004885 camera3_capture_request_t *request,
4886 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004887{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004888 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004889 int rc = NO_ERROR;
4890 int32_t request_id;
4891 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 bool isVidBufRequested = false;
4893 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004894 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004895
4896 pthread_mutex_lock(&mMutex);
4897
4898 // Validate current state
4899 switch (mState) {
4900 case CONFIGURED:
4901 case STARTED:
4902 /* valid state */
4903 break;
4904
4905 case ERROR:
4906 pthread_mutex_unlock(&mMutex);
4907 handleCameraDeviceError();
4908 return -ENODEV;
4909
4910 default:
4911 LOGE("Invalid state %d", mState);
4912 pthread_mutex_unlock(&mMutex);
4913 return -ENODEV;
4914 }
4915
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004916 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 if (rc != NO_ERROR) {
4918 LOGE("incoming request is not valid");
4919 pthread_mutex_unlock(&mMutex);
4920 return rc;
4921 }
4922
4923 meta = request->settings;
4924
4925 // For first capture request, send capture intent, and
4926 // stream on all streams
4927 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004928 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 // send an unconfigure to the backend so that the isp
4930 // resources are deallocated
4931 if (!mFirstConfiguration) {
4932 cam_stream_size_info_t stream_config_info;
4933 int32_t hal_version = CAM_HAL_V3;
4934 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4935 stream_config_info.buffer_info.min_buffers =
4936 MIN_INFLIGHT_REQUESTS;
4937 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004938 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004939 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004940 clear_metadata_buffer(mParameters);
4941 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942 CAM_INTF_PARM_HAL_VERSION, hal_version);
4943 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4944 CAM_INTF_META_STREAM_INFO, stream_config_info);
4945 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4946 mParameters);
4947 if (rc < 0) {
4948 LOGE("set_parms for unconfigure failed");
4949 pthread_mutex_unlock(&mMutex);
4950 return rc;
4951 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004952
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004954 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004956 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 property_get("persist.camera.is_type", is_type_value, "4");
4959 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4960 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4961 property_get("persist.camera.is_type_preview", is_type_value, "4");
4962 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4963 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004964
4965 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4966 int32_t hal_version = CAM_HAL_V3;
4967 uint8_t captureIntent =
4968 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4969 mCaptureIntent = captureIntent;
4970 clear_metadata_buffer(mParameters);
4971 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4972 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4973 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004974 if (mFirstConfiguration) {
4975 // configure instant AEC
4976 // Instant AEC is a session based parameter and it is needed only
4977 // once per complete session after open camera.
4978 // i.e. This is set only once for the first capture request, after open camera.
4979 setInstantAEC(meta);
4980 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 uint8_t fwkVideoStabMode=0;
4982 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4983 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4984 }
4985
Xue Tuecac74e2017-04-17 13:58:15 -07004986 // If EIS setprop is enabled then only turn it on for video/preview
4987 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004988 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004989 int32_t vsMode;
4990 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4992 rc = BAD_VALUE;
4993 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004994 LOGD("setEis %d", setEis);
4995 bool eis3Supported = false;
4996 size_t count = IS_TYPE_MAX;
4997 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4998 for (size_t i = 0; i < count; i++) {
4999 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
5000 eis3Supported = true;
5001 break;
5002 }
5003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005004
5005 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5008 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
5010 is_type = isTypePreview;
5011 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
5012 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
5013 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07005014 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005015 } else {
5016 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005018 } else {
5019 is_type = IS_TYPE_NONE;
5020 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005021 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005022 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005023 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
5024 }
5025 }
5026
5027 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5028 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5029
Thierry Strudel54dc9782017-02-15 12:12:10 -08005030 //Disable tintless only if the property is set to 0
5031 memset(prop, 0, sizeof(prop));
5032 property_get("persist.camera.tintless.enable", prop, "1");
5033 int32_t tintless_value = atoi(prop);
5034
Thierry Strudel3d639192016-09-09 11:52:26 -07005035 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5036 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005037
Thierry Strudel3d639192016-09-09 11:52:26 -07005038 //Disable CDS for HFR mode or if DIS/EIS is on.
5039 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5040 //after every configure_stream
5041 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5042 (m_bIsVideo)) {
5043 int32_t cds = CAM_CDS_MODE_OFF;
5044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5045 CAM_INTF_PARM_CDS_MODE, cds))
5046 LOGE("Failed to disable CDS for HFR mode");
5047
5048 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005049
5050 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5051 uint8_t* use_av_timer = NULL;
5052
5053 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005054 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005055 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005056 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005057 }
5058 else{
5059 use_av_timer =
5060 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005061 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005062 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005063 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5064 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 }
5066
5067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5068 rc = BAD_VALUE;
5069 }
5070 }
5071
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 setMobicat();
5073
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005074 uint8_t nrMode = 0;
5075 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5076 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5077 }
5078
Thierry Strudel3d639192016-09-09 11:52:26 -07005079 /* Set fps and hfr mode while sending meta stream info so that sensor
5080 * can configure appropriate streaming mode */
5081 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005082 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5083 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005084 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5085 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005086 if (rc == NO_ERROR) {
5087 int32_t max_fps =
5088 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005089 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005090 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5091 }
5092 /* For HFR, more buffers are dequeued upfront to improve the performance */
5093 if (mBatchSize) {
5094 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5095 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5096 }
5097 }
5098 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005099 LOGE("setHalFpsRange failed");
5100 }
5101 }
5102 if (meta.exists(ANDROID_CONTROL_MODE)) {
5103 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5104 rc = extractSceneMode(meta, metaMode, mParameters);
5105 if (rc != NO_ERROR) {
5106 LOGE("extractSceneMode failed");
5107 }
5108 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005109 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005110
Thierry Strudel04e026f2016-10-10 11:27:36 -07005111 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5112 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5113 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5114 rc = setVideoHdrMode(mParameters, vhdr);
5115 if (rc != NO_ERROR) {
5116            LOGE("setVideoHdrMode failed");
5117 }
5118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005120 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005121 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005122 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005123 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5124 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5125 sensorModeFullFov)) {
5126 rc = BAD_VALUE;
5127 }
5128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 //TODO: validate the arguments, HSV scenemode should have only the
5130 //advertised fps ranges
5131
5132 /*set the capture intent, hal version, tintless, stream info,
5133     *and disable parameters to the backend*/
5134 LOGD("set_parms META_STREAM_INFO " );
5135 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005136 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5137 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 mStreamConfigInfo.type[i],
5139 mStreamConfigInfo.stream_sizes[i].width,
5140 mStreamConfigInfo.stream_sizes[i].height,
5141 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005142 mStreamConfigInfo.format[i],
5143 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005144 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005145
Thierry Strudel3d639192016-09-09 11:52:26 -07005146 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5147 mParameters);
5148 if (rc < 0) {
5149 LOGE("set_parms failed for hal version, stream info");
5150 }
5151
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005152 cam_sensor_mode_info_t sensorModeInfo = {};
5153 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005154 if (rc != NO_ERROR) {
5155 LOGE("Failed to get sensor output size");
5156 pthread_mutex_unlock(&mMutex);
5157 goto error_exit;
5158 }
5159
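    // Update the crop region mapper with the full active array size and the selected
    // sensor mode's active array size so that crop regions can be translated between
    // the two coordinate spaces.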
5160 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5161 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005162 sensorModeInfo.active_array_size.width,
5163 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005164
5165    /* Set batch mode before initializing channels. Since registerBuffer
5166     * internally initializes some of the channels, it is better to set batch
5167     * mode even before the first registerBuffer call. */
5168 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5169 it != mStreamInfo.end(); it++) {
5170 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5171 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5172 && mBatchSize) {
5173 rc = channel->setBatchSize(mBatchSize);
5174 //Disable per frame map unmap for HFR/batchmode case
5175 rc |= channel->setPerFrameMapUnmap(false);
5176 if (NO_ERROR != rc) {
5177 LOGE("Channel init failed %d", rc);
5178 pthread_mutex_unlock(&mMutex);
5179 goto error_exit;
5180 }
5181 }
5182 }
5183
5184 //First initialize all streams
5185 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5186 it != mStreamInfo.end(); it++) {
5187 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005188
5189 /* Initial value of NR mode is needed before stream on */
5190 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5192 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005193 setEis) {
5194 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5195 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5196 is_type = mStreamConfigInfo.is_type[i];
5197 break;
5198 }
5199 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005201 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 rc = channel->initialize(IS_TYPE_NONE);
5203 }
5204 if (NO_ERROR != rc) {
5205 LOGE("Channel initialization failed %d", rc);
5206 pthread_mutex_unlock(&mMutex);
5207 goto error_exit;
5208 }
5209 }
5210
5211 if (mRawDumpChannel) {
5212 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5213 if (rc != NO_ERROR) {
5214 LOGE("Error: Raw Dump Channel init failed");
5215 pthread_mutex_unlock(&mMutex);
5216 goto error_exit;
5217 }
5218 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005219 if (mHdrPlusRawSrcChannel) {
5220 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5221 if (rc != NO_ERROR) {
5222 LOGE("Error: HDR+ RAW Source Channel init failed");
5223 pthread_mutex_unlock(&mMutex);
5224 goto error_exit;
5225 }
5226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 if (mSupportChannel) {
5228 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5229 if (rc < 0) {
5230 LOGE("Support channel initialization failed");
5231 pthread_mutex_unlock(&mMutex);
5232 goto error_exit;
5233 }
5234 }
5235 if (mAnalysisChannel) {
5236 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5237 if (rc < 0) {
5238 LOGE("Analysis channel initialization failed");
5239 pthread_mutex_unlock(&mMutex);
5240 goto error_exit;
5241 }
5242 }
5243 if (mDummyBatchChannel) {
5244 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5245 if (rc < 0) {
5246 LOGE("mDummyBatchChannel setBatchSize failed");
5247 pthread_mutex_unlock(&mMutex);
5248 goto error_exit;
5249 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005250 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005251 if (rc < 0) {
5252 LOGE("mDummyBatchChannel initialization failed");
5253 pthread_mutex_unlock(&mMutex);
5254 goto error_exit;
5255 }
5256 }
5257
5258 // Set bundle info
5259 rc = setBundleInfo();
5260 if (rc < 0) {
5261 LOGE("setBundleInfo failed %d", rc);
5262 pthread_mutex_unlock(&mMutex);
5263 goto error_exit;
5264 }
5265
5266 //update settings from app here
5267 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5268 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5269 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5270 }
5271 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5272 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5273 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5274 }
5275 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5276 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5277 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5278
5279 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5280 (mLinkedCameraId != mCameraId) ) {
5281 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5282 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005283 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 goto error_exit;
5285 }
5286 }
5287
5288 // add bundle related cameras
5289 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5290 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005291 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5292 &m_pDualCamCmdPtr->bundle_info;
5293 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 if (mIsDeviceLinked)
5295 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5296 else
5297 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5298
5299 pthread_mutex_lock(&gCamLock);
5300
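        // Reject linking if the linked camera's session id is still the 0xDEADBEEF
        // sentinel, i.e. the linked camera has no valid session yet.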
5301 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5302 LOGE("Dualcam: Invalid Session Id ");
5303 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005304 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 goto error_exit;
5306 }
5307
5308 if (mIsMainCamera == 1) {
5309 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5310 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005311 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005312 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 // related session id should be session id of linked session
5314 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5315 } else {
5316 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5317 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005318 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005319 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5321 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005322 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005323 pthread_mutex_unlock(&gCamLock);
5324
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005325 rc = mCameraHandle->ops->set_dual_cam_cmd(
5326 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 if (rc < 0) {
5328 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005329 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 goto error_exit;
5331 }
5332 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 goto no_error;
5334error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 return rc;
5337no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005338 mWokenUpByDaemon = false;
5339 mPendingLiveRequest = 0;
5340 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005341 }
5342
5343 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005344 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005345
5346 if (mFlushPerf) {
5347 //we cannot accept any requests during flush
5348 LOGE("process_capture_request cannot proceed during flush");
5349 pthread_mutex_unlock(&mMutex);
5350 return NO_ERROR; //should return an error
5351 }
5352
5353 if (meta.exists(ANDROID_REQUEST_ID)) {
5354 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5355 mCurrentRequestId = request_id;
5356 LOGD("Received request with id: %d", request_id);
5357 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5358 LOGE("Unable to find request id field, \
5359 & no previous id available");
5360 pthread_mutex_unlock(&mMutex);
5361 return NAME_NOT_FOUND;
5362 } else {
5363 LOGD("Re-using old request id");
5364 request_id = mCurrentRequestId;
5365 }
5366
5367 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5368 request->num_output_buffers,
5369 request->input_buffer,
5370 frameNumber);
5371 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005372 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005373 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005374 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005375 uint32_t snapshotStreamId = 0;
5376 for (size_t i = 0; i < request->num_output_buffers; i++) {
5377 const camera3_stream_buffer_t& output = request->output_buffers[i];
5378 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5379
Emilian Peev7650c122017-01-19 08:24:33 -08005380 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5381 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005382 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 blob_request = 1;
5384 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5385 }
5386
5387 if (output.acquire_fence != -1) {
5388 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5389 close(output.acquire_fence);
5390 if (rc != OK) {
5391 LOGE("sync wait failed %d", rc);
5392 pthread_mutex_unlock(&mMutex);
5393 return rc;
5394 }
5395 }
5396
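        // Depth blob buffers are serviced by the dedicated depth channel, so skip adding
        // them to the backend stream request list.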
Emilian Peev0f3c3162017-03-15 12:57:46 +00005397 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5398 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005399 depthRequestPresent = true;
5400 continue;
5401 }
5402
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005405
5406 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5407 isVidBufRequested = true;
5408 }
5409 }
5410
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005411    //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5412 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5413 itr++) {
5414 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5415 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5416 channel->getStreamID(channel->getStreamTypeMask());
5417
5418 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5419 isVidBufRequested = true;
5420 }
5421 }
5422
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005424 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005425 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005426 }
5427 if (blob_request && mRawDumpChannel) {
5428 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005429 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005430 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005431 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005432 }
5433
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005434 {
5435 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5436 // Request a RAW buffer if
5437 // 1. mHdrPlusRawSrcChannel is valid.
5438        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5439 // 3. There is no pending HDR+ request.
5440 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5441 mHdrPlusPendingRequests.size() == 0) {
5442 streamsArray.stream_request[streamsArray.num_streams].streamID =
5443 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5444 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5445 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005446 }
5447
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005448 //extract capture intent
5449 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5450 mCaptureIntent =
5451 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5452 }
5453
5454 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5455 mCacMode =
5456 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5457 }
5458
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005459 uint8_t requestedLensShadingMapMode;
5460 // Get the shading map mode.
5461 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5462 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5463 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5464 } else {
5465 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5466 }
5467
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005468 if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5469 mLastRequestedFaceDetectMode =
5470 meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5471 }
5472
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005473 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005474 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005475
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005476 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005477 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005478 // If this request has a still capture intent, try to submit an HDR+ request.
5479 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5480 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5481 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5482 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005483 }
5484
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005485 if (hdrPlusRequest) {
5486 // For a HDR+ request, just set the frame parameters.
5487 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5488 if (rc < 0) {
5489 LOGE("fail to set frame parameters");
5490 pthread_mutex_unlock(&mMutex);
5491 return rc;
5492 }
5493 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 /* Parse the settings:
5495 * - For every request in NORMAL MODE
5496 * - For every request in HFR mode during preview only case
5497 * - For first request of every batch in HFR mode during video
5498 * recording. In batchmode the same settings except frame number is
5499 * repeated in each request of the batch.
5500 */
5501 if (!mBatchSize ||
5502 (mBatchSize && !isVidBufRequested) ||
5503 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005504 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005505 if (rc < 0) {
5506 LOGE("fail to set frame parameters");
5507 pthread_mutex_unlock(&mMutex);
5508 return rc;
5509 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005510
5511 {
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005512 // If HDR+ mode is enabled, override the following modes so the necessary metadata
5513 // will be included in the result metadata sent to Easel HDR+.
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005514 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5515 if (mHdrPlusModeEnabled) {
5516 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5517 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005518 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
5519 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005520 }
5521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005522 }
5523 /* For batchMode HFR, setFrameParameters is not called for every
5524 * request. But only frame number of the latest request is parsed.
5525 * Keep track of first and last frame numbers in a batch so that
5526 * metadata for the frame numbers of batch can be duplicated in
5527     * handleBatchMetadata */
5528 if (mBatchSize) {
5529 if (!mToBeQueuedVidBufs) {
5530 //start of the batch
5531 mFirstFrameNumberInBatch = request->frame_number;
5532 }
5533 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5534 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5535 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005536 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005537 return BAD_VALUE;
5538 }
5539 }
5540 if (mNeedSensorRestart) {
5541 /* Unlock the mutex as restartSensor waits on the channels to be
5542 * stopped, which in turn calls stream callback functions -
5543 * handleBufferWithLock and handleMetadataWithLock */
5544 pthread_mutex_unlock(&mMutex);
5545 rc = dynamicUpdateMetaStreamInfo();
5546 if (rc != NO_ERROR) {
5547 LOGE("Restarting the sensor failed");
5548 return BAD_VALUE;
5549 }
5550 mNeedSensorRestart = false;
5551 pthread_mutex_lock(&mMutex);
5552 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005553 if(mResetInstantAEC) {
5554 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5555 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5556 mResetInstantAEC = false;
5557 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005558 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005559 if (request->input_buffer->acquire_fence != -1) {
5560 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5561 close(request->input_buffer->acquire_fence);
5562 if (rc != OK) {
5563 LOGE("input buffer sync wait failed %d", rc);
5564 pthread_mutex_unlock(&mMutex);
5565 return rc;
5566 }
5567 }
5568 }
5569
5570 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5571 mLastCustIntentFrmNum = frameNumber;
5572 }
5573 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005575 pendingRequestIterator latestRequest;
5576 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005577 pendingRequest.num_buffers = depthRequestPresent ?
5578 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005579 pendingRequest.request_id = request_id;
5580 pendingRequest.blob_request = blob_request;
5581 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005582 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005583 pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005584 if (request->input_buffer) {
5585 pendingRequest.input_buffer =
5586 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5587 *(pendingRequest.input_buffer) = *(request->input_buffer);
5588 pInputBuffer = pendingRequest.input_buffer;
5589 } else {
5590 pendingRequest.input_buffer = NULL;
5591 pInputBuffer = NULL;
5592 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005593 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005594
5595 pendingRequest.pipeline_depth = 0;
5596 pendingRequest.partial_result_cnt = 0;
5597 extractJpegMetadata(mCurJpegMeta, request);
5598 pendingRequest.jpegMetadata = mCurJpegMeta;
5599 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005601 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005602 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005603 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5604 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005605
Samuel Ha68ba5172016-12-15 18:41:12 -08005606 /* DevCamDebug metadata processCaptureRequest */
5607 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5608 mDevCamDebugMetaEnable =
5609 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5610 }
5611 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5612 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005613
5614 //extract CAC info
5615 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5616 mCacMode =
5617 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5618 }
5619 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005620 pendingRequest.hdrplus = hdrPlusRequest;
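    // Record the expected frame duration for this request and add it to the total
    // expected in-flight duration.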
Emilian Peev30522a12017-08-03 14:36:33 +01005621 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5622 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005623
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005624 // extract enableZsl info
5625 if (gExposeEnableZslKey) {
5626 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5627 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5628 mZslEnabled = pendingRequest.enableZsl;
5629 } else {
5630 pendingRequest.enableZsl = mZslEnabled;
5631 }
5632 }
5633
Thierry Strudel3d639192016-09-09 11:52:26 -07005634 PendingBuffersInRequest bufsForCurRequest;
5635 bufsForCurRequest.frame_number = frameNumber;
5636 // Mark current timestamp for the new request
5637 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005638 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005639 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005640
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005641 if (hdrPlusRequest) {
5642 // Save settings for this request.
5643 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5644 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5645
5646 // Add to pending HDR+ request queue.
5647 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5648 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5649
5650 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5651 }
5652
Thierry Strudel3d639192016-09-09 11:52:26 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005654 if ((request->output_buffers[i].stream->data_space ==
5655 HAL_DATASPACE_DEPTH) &&
5656 (HAL_PIXEL_FORMAT_BLOB ==
5657 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005658 continue;
5659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 RequestedBufferInfo requestedBuf;
5661 memset(&requestedBuf, 0, sizeof(requestedBuf));
5662 requestedBuf.stream = request->output_buffers[i].stream;
5663 requestedBuf.buffer = NULL;
5664 pendingRequest.buffers.push_back(requestedBuf);
5665
5666 // Add to buffer handle the pending buffers list
5667 PendingBufferInfo bufferInfo;
5668 bufferInfo.buffer = request->output_buffers[i].buffer;
5669 bufferInfo.stream = request->output_buffers[i].stream;
5670 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5671 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5672 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5673 frameNumber, bufferInfo.buffer,
5674 channel->getStreamTypeMask(), bufferInfo.stream->format);
5675 }
5676 // Add this request packet into mPendingBuffersMap
5677 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5678 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5679 mPendingBuffersMap.get_num_overall_buffers());
5680
5681 latestRequest = mPendingRequestsList.insert(
5682 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005683
5684 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5685 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005686 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005687 for (size_t i = 0; i < request->num_output_buffers; i++) {
5688 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5689 }
5690
Thierry Strudel3d639192016-09-09 11:52:26 -07005691 if(mFlush) {
5692 LOGI("mFlush is true");
5693 pthread_mutex_unlock(&mMutex);
5694 return NO_ERROR;
5695 }
5696
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5698 // channel.
5699 if (!hdrPlusRequest) {
5700 int indexUsed;
5701 // Notify metadata channel we receive a request
5702 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005703
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005704 if(request->input_buffer != NULL){
5705 LOGD("Input request, frame_number %d", frameNumber);
5706 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5707 if (NO_ERROR != rc) {
5708 LOGE("fail to set reproc parameters");
5709 pthread_mutex_unlock(&mMutex);
5710 return rc;
5711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005712 }
5713
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005714 // Call request on other streams
5715 uint32_t streams_need_metadata = 0;
5716 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5717 for (size_t i = 0; i < request->num_output_buffers; i++) {
5718 const camera3_stream_buffer_t& output = request->output_buffers[i];
5719 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5720
5721 if (channel == NULL) {
5722 LOGW("invalid channel pointer for stream");
5723 continue;
5724 }
5725
5726 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5727 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5728 output.buffer, request->input_buffer, frameNumber);
5729 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005730 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005731 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5732 if (rc < 0) {
5733 LOGE("Fail to request on picture channel");
5734 pthread_mutex_unlock(&mMutex);
5735 return rc;
5736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005737 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005738 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5739 assert(NULL != mDepthChannel);
5740 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005741
Emilian Peev7650c122017-01-19 08:24:33 -08005742 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5743 if (rc < 0) {
5744 LOGE("Fail to map on depth buffer");
5745 pthread_mutex_unlock(&mMutex);
5746 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005747 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005748 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005749 } else {
5750 LOGD("snapshot request with buffer %p, frame_number %d",
5751 output.buffer, frameNumber);
5752 if (!request->settings) {
5753 rc = channel->request(output.buffer, frameNumber,
5754 NULL, mPrevParameters, indexUsed);
5755 } else {
5756 rc = channel->request(output.buffer, frameNumber,
5757 NULL, mParameters, indexUsed);
5758 }
5759 if (rc < 0) {
5760 LOGE("Fail to request on picture channel");
5761 pthread_mutex_unlock(&mMutex);
5762 return rc;
5763 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005764
Emilian Peev7650c122017-01-19 08:24:33 -08005765 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5766 uint32_t j = 0;
5767 for (j = 0; j < streamsArray.num_streams; j++) {
5768 if (streamsArray.stream_request[j].streamID == streamId) {
5769 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5770 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5771 else
5772 streamsArray.stream_request[j].buf_index = indexUsed;
5773 break;
5774 }
5775 }
5776 if (j == streamsArray.num_streams) {
5777 LOGE("Did not find matching stream to update index");
5778 assert(0);
5779 }
5780
5781 pendingBufferIter->need_metadata = true;
5782 streams_need_metadata++;
5783 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005784 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005785 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5786 bool needMetadata = false;
5787 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5788 rc = yuvChannel->request(output.buffer, frameNumber,
5789 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5790 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005791 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005792 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005793 pthread_mutex_unlock(&mMutex);
5794 return rc;
5795 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005796
5797 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5798 uint32_t j = 0;
5799 for (j = 0; j < streamsArray.num_streams; j++) {
5800 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005801 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5802 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5803 else
5804 streamsArray.stream_request[j].buf_index = indexUsed;
5805 break;
5806 }
5807 }
5808 if (j == streamsArray.num_streams) {
5809 LOGE("Did not find matching stream to update index");
5810 assert(0);
5811 }
5812
5813 pendingBufferIter->need_metadata = needMetadata;
5814 if (needMetadata)
5815 streams_need_metadata += 1;
5816 LOGD("calling YUV channel request, need_metadata is %d",
5817 needMetadata);
5818 } else {
5819 LOGD("request with buffer %p, frame_number %d",
5820 output.buffer, frameNumber);
5821
5822 rc = channel->request(output.buffer, frameNumber, indexUsed);
5823
5824 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5825 uint32_t j = 0;
5826 for (j = 0; j < streamsArray.num_streams; j++) {
5827 if (streamsArray.stream_request[j].streamID == streamId) {
5828 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5829 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5830 else
5831 streamsArray.stream_request[j].buf_index = indexUsed;
5832 break;
5833 }
5834 }
5835 if (j == streamsArray.num_streams) {
5836 LOGE("Did not find matching stream to update index");
5837 assert(0);
5838 }
5839
5840 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5841 && mBatchSize) {
5842 mToBeQueuedVidBufs++;
5843 if (mToBeQueuedVidBufs == mBatchSize) {
5844 channel->queueBatchBuf();
5845 }
5846 }
5847 if (rc < 0) {
5848 LOGE("request failed");
5849 pthread_mutex_unlock(&mMutex);
5850 return rc;
5851 }
5852 }
5853 pendingBufferIter++;
5854 }
5855
5856 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5857 itr++) {
5858 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5859
5860 if (channel == NULL) {
5861 LOGE("invalid channel pointer for stream");
5862 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005863 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005864 return BAD_VALUE;
5865 }
5866
5867 InternalRequest requestedStream;
5868 requestedStream = (*itr);
5869
5870
5871 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5872 LOGD("snapshot request internally input buffer %p, frame_number %d",
5873 request->input_buffer, frameNumber);
5874 if(request->input_buffer != NULL){
5875 rc = channel->request(NULL, frameNumber,
5876 pInputBuffer, &mReprocMeta, indexUsed, true,
5877 requestedStream.meteringOnly);
5878 if (rc < 0) {
5879 LOGE("Fail to request on picture channel");
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
5883 } else {
5884 LOGD("snapshot request with frame_number %d", frameNumber);
5885 if (!request->settings) {
5886 rc = channel->request(NULL, frameNumber,
5887 NULL, mPrevParameters, indexUsed, true,
5888 requestedStream.meteringOnly);
5889 } else {
5890 rc = channel->request(NULL, frameNumber,
5891 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5892 }
5893 if (rc < 0) {
5894 LOGE("Fail to request on picture channel");
5895 pthread_mutex_unlock(&mMutex);
5896 return rc;
5897 }
5898
5899 if ((*itr).meteringOnly != 1) {
5900 requestedStream.need_metadata = 1;
5901 streams_need_metadata++;
5902 }
5903 }
5904
5905 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5906 uint32_t j = 0;
5907 for (j = 0; j < streamsArray.num_streams; j++) {
5908 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005909 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5910 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5911 else
5912 streamsArray.stream_request[j].buf_index = indexUsed;
5913 break;
5914 }
5915 }
5916 if (j == streamsArray.num_streams) {
5917 LOGE("Did not find matching stream to update index");
5918 assert(0);
5919 }
5920
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005921 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005922 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005923 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005924 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005925 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005926 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005927 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005928 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005929
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005930 //If 2 streams have need_metadata set to true, fail the request, unless
5931 //we copy/reference count the metadata buffer
5932 if (streams_need_metadata > 1) {
5933            LOGE("not supporting request in which two streams require"
5934 " 2 HAL metadata for reprocessing");
5935 pthread_mutex_unlock(&mMutex);
5936 return -EINVAL;
5937 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005938
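    // Choose the PDAF data mode for this request: skip PD data by default when a depth
    // channel exists, honor the per-request PD data enable setting when a depth buffer is
    // requested, and otherwise fall back to the last cached mode.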
Emilian Peev656e4fa2017-06-02 16:47:04 +01005939 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5940 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5941 if (depthRequestPresent && mDepthChannel) {
5942 if (request->settings) {
5943 camera_metadata_ro_entry entry;
5944 if (find_camera_metadata_ro_entry(request->settings,
5945 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5946 if (entry.data.u8[0]) {
5947 pdafEnable = CAM_PD_DATA_ENABLED;
5948 } else {
5949 pdafEnable = CAM_PD_DATA_SKIP;
5950 }
5951 mDepthCloudMode = pdafEnable;
5952 } else {
5953 pdafEnable = mDepthCloudMode;
5954 }
5955 } else {
5956 pdafEnable = mDepthCloudMode;
5957 }
5958 }
5959
Emilian Peev7650c122017-01-19 08:24:33 -08005960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5961 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5962 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5963 pthread_mutex_unlock(&mMutex);
5964 return BAD_VALUE;
5965 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005966
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005967 if (request->input_buffer == NULL) {
5968 /* Set the parameters to backend:
5969 * - For every request in NORMAL MODE
5970 * - For every request in HFR mode during preview only case
5971 * - Once every batch in HFR mode during video recording
5972 */
5973 if (!mBatchSize ||
5974 (mBatchSize && !isVidBufRequested) ||
5975 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5976 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5977 mBatchSize, isVidBufRequested,
5978 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005979
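            // When a batch is complete, merge this request's streams into
            // mBatchedStreamsArray (skipping duplicates) and send the merged list to the
            // backend in a single set_parms call.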
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005980 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5981 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5982 uint32_t m = 0;
5983 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5984 if (streamsArray.stream_request[k].streamID ==
5985 mBatchedStreamsArray.stream_request[m].streamID)
5986 break;
5987 }
5988 if (m == mBatchedStreamsArray.num_streams) {
5989 mBatchedStreamsArray.stream_request\
5990 [mBatchedStreamsArray.num_streams].streamID =
5991 streamsArray.stream_request[k].streamID;
5992 mBatchedStreamsArray.stream_request\
5993 [mBatchedStreamsArray.num_streams].buf_index =
5994 streamsArray.stream_request[k].buf_index;
5995 mBatchedStreamsArray.num_streams =
5996 mBatchedStreamsArray.num_streams + 1;
5997 }
5998 }
5999 streamsArray = mBatchedStreamsArray;
6000 }
6001 /* Update stream id of all the requested buffers */
6002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6003 streamsArray)) {
6004 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006005 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006006 return BAD_VALUE;
6007 }
6008
6009 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6010 mParameters);
6011 if (rc < 0) {
6012 LOGE("set_parms failed");
6013 }
6014            /* reset to zero because the batch is queued */
6015 mToBeQueuedVidBufs = 0;
6016 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6017 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6018 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
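            // Batch not yet full: accumulate this request's streams into
            // mBatchedStreamsArray without issuing set_parms; the merged list is sent once
            // the batch completes.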
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006019 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6020 uint32_t m = 0;
6021 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6022 if (streamsArray.stream_request[k].streamID ==
6023 mBatchedStreamsArray.stream_request[m].streamID)
6024 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006025 }
6026 if (m == mBatchedStreamsArray.num_streams) {
6027 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6028 streamID = streamsArray.stream_request[k].streamID;
6029 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6030 buf_index = streamsArray.stream_request[k].buf_index;
6031 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6032 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006033 }
6034 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006035 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006036
6037 // Start all streams after the first setting is sent, so that the
6038 // setting can be applied sooner: (0 + apply_delay)th frame.
6039 if (mState == CONFIGURED && mChannelHandle) {
6040 //Then start them.
6041 LOGH("Start META Channel");
6042 rc = mMetadataChannel->start();
6043 if (rc < 0) {
6044 LOGE("META channel start failed");
6045 pthread_mutex_unlock(&mMutex);
6046 return rc;
6047 }
6048
6049 if (mAnalysisChannel) {
6050 rc = mAnalysisChannel->start();
6051 if (rc < 0) {
6052 LOGE("Analysis channel start failed");
6053 mMetadataChannel->stop();
6054 pthread_mutex_unlock(&mMutex);
6055 return rc;
6056 }
6057 }
6058
6059 if (mSupportChannel) {
6060 rc = mSupportChannel->start();
6061 if (rc < 0) {
6062 LOGE("Support channel start failed");
6063 mMetadataChannel->stop();
6064 /* Although support and analysis are mutually exclusive today
6065                       adding it in any case for future-proofing */
6066 if (mAnalysisChannel) {
6067 mAnalysisChannel->stop();
6068 }
6069 pthread_mutex_unlock(&mMutex);
6070 return rc;
6071 }
6072 }
6073 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6074 it != mStreamInfo.end(); it++) {
6075 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6076 LOGH("Start Processing Channel mask=%d",
6077 channel->getStreamTypeMask());
6078 rc = channel->start();
6079 if (rc < 0) {
6080 LOGE("channel start failed");
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
6084 }
6085
6086 if (mRawDumpChannel) {
6087 LOGD("Starting raw dump stream");
6088 rc = mRawDumpChannel->start();
6089 if (rc != NO_ERROR) {
6090 LOGE("Error Starting Raw Dump Channel");
6091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6092 it != mStreamInfo.end(); it++) {
6093 QCamera3Channel *channel =
6094 (QCamera3Channel *)(*it)->stream->priv;
6095 LOGH("Stopping Processing Channel mask=%d",
6096 channel->getStreamTypeMask());
6097 channel->stop();
6098 }
6099 if (mSupportChannel)
6100 mSupportChannel->stop();
6101 if (mAnalysisChannel) {
6102 mAnalysisChannel->stop();
6103 }
6104 mMetadataChannel->stop();
6105 pthread_mutex_unlock(&mMutex);
6106 return rc;
6107 }
6108 }
6109
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006110 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006111 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006112 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006113 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006114 pthread_mutex_unlock(&mMutex);
6115 return rc;
6116 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006119 }
6120
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006121 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006122 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006123 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006124 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006125 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6126 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6127 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006128 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6129 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6130 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006131
6132 if (isSessionHdrPlusModeCompatible()) {
6133 rc = enableHdrPlusModeLocked();
6134 if (rc != OK) {
6135 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6136 pthread_mutex_unlock(&mMutex);
6137 return rc;
6138 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006139 }
6140
6141 mFirstPreviewIntentSeen = true;
6142 }
6143 }
6144
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6146
6147 mState = STARTED;
6148 // Added a timed condition wait
6149 struct timespec ts;
6150 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006151 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 if (rc < 0) {
6153 isValidTimeout = 0;
6154 LOGE("Error reading the real time clock!!");
6155 }
6156 else {
6157        // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006158 int64_t timeout = 5;
6159 {
6160 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6161 // If there is a pending HDR+ request, the following requests may be blocked until the
6162 // HDR+ request is done. So allow a longer timeout.
6163 if (mHdrPlusPendingRequests.size() > 0) {
6164 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6165 }
6166 }
6167 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 }
6169 //Block on conditional variable
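    // Throttle the caller: wait until the number of in-flight requests drops below
    // mMinInFlightRequests, unless this is a reprocess request or the HAL hit an error.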
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006170 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 (mState != ERROR) && (mState != DEINIT)) {
6172 if (!isValidTimeout) {
6173 LOGD("Blocking on conditional wait");
6174 pthread_cond_wait(&mRequestCond, &mMutex);
6175 }
6176 else {
6177 LOGD("Blocking on timed conditional wait");
6178 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6179 if (rc == ETIMEDOUT) {
6180 rc = -ENODEV;
6181 LOGE("Unblocked on timeout!!!!");
6182 break;
6183 }
6184 }
6185 LOGD("Unblocked");
6186 if (mWokenUpByDaemon) {
6187 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006188 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006189 break;
6190 }
6191 }
6192 pthread_mutex_unlock(&mMutex);
6193
6194 return rc;
6195}
6196
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006197int32_t QCamera3HardwareInterface::startChannelLocked()
6198{
6199 // Configure modules for stream on.
6200 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6201 mChannelHandle, /*start_sensor_streaming*/false);
6202 if (rc != NO_ERROR) {
6203 LOGE("start_channel failed %d", rc);
6204 return rc;
6205 }
6206
6207 {
6208 // Configure Easel for stream on.
6209 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6210
6211 // Now that sensor mode should have been selected, get the selected sensor mode
6212 // info.
6213 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6214 getCurrentSensorModeInfo(mSensorModeInfo);
6215
6216 if (EaselManagerClientOpened) {
6217 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6218 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6219 /*enableCapture*/true);
6220 if (rc != OK) {
6221 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6222 mCameraId, mSensorModeInfo.op_pixel_clk);
6223 return rc;
6224 }
6225 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6226 mEaselMipiStarted = true;
6227 }
6228 }
6229
6230 // Start sensor streaming.
6231 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6232 mChannelHandle);
6233 if (rc != NO_ERROR) {
6234        LOGE("start_sensor_streaming failed %d", rc);
6235 return rc;
6236 }
6237
6238 return 0;
6239}
6240
6241void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6242{
6243 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6244 mChannelHandle, stopChannelImmediately);
6245
6246 {
6247 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6248 if (EaselManagerClientOpened && mEaselMipiStarted) {
6249 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6250 if (rc != 0) {
6251 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6252 }
6253 mEaselMipiStarted = false;
6254 }
6255 }
6256}
6257
Thierry Strudel3d639192016-09-09 11:52:26 -07006258/*===========================================================================
6259 * FUNCTION : dump
6260 *
6261 * DESCRIPTION:
6262 *
6263 * PARAMETERS :
6264 *
6265 *
6266 * RETURN :
6267 *==========================================================================*/
6268void QCamera3HardwareInterface::dump(int fd)
6269{
6270 pthread_mutex_lock(&mMutex);
6271 dprintf(fd, "\n Camera HAL3 information Begin \n");
6272
6273 dprintf(fd, "\nNumber of pending requests: %zu \n",
6274 mPendingRequestsList.size());
6275 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6276 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6277 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6278 for(pendingRequestIterator i = mPendingRequestsList.begin();
6279 i != mPendingRequestsList.end(); i++) {
6280 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6281 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6282 i->input_buffer);
6283 }
6284 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6285 mPendingBuffersMap.get_num_overall_buffers());
6286 dprintf(fd, "-------+------------------\n");
6287 dprintf(fd, " Frame | Stream type mask \n");
6288 dprintf(fd, "-------+------------------\n");
6289 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6290 for(auto &j : req.mPendingBufferList) {
6291 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6292 dprintf(fd, " %5d | %11d \n",
6293 req.frame_number, channel->getStreamTypeMask());
6294 }
6295 }
6296 dprintf(fd, "-------+------------------\n");
6297
6298 dprintf(fd, "\nPending frame drop list: %zu\n",
6299 mPendingFrameDropList.size());
6300 dprintf(fd, "-------+-----------\n");
6301 dprintf(fd, " Frame | Stream ID \n");
6302 dprintf(fd, "-------+-----------\n");
6303 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6304 i != mPendingFrameDropList.end(); i++) {
6305 dprintf(fd, " %5d | %9d \n",
6306 i->frame_number, i->stream_ID);
6307 }
6308 dprintf(fd, "-------+-----------\n");
6309
6310 dprintf(fd, "\n Camera HAL3 information End \n");
6311
6312 /* use dumpsys media.camera as trigger to send update debug level event */
6313 mUpdateDebugLevel = true;
6314 pthread_mutex_unlock(&mMutex);
6315 return;
6316}
6317
6318/*===========================================================================
6319 * FUNCTION : flush
6320 *
6321 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6322 * conditionally restarts channels
6323 *
6324 * PARAMETERS :
6325 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006326 * @ stopChannelImmediately: stop the channel immediately. This should be used
6327 *                          when the device has encountered an error and MIPI
6328 *                          may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006329 *
6330 * RETURN :
6331 * 0 on success
6332 * Error code on failure
6333 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006334int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006335{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006336 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006337 int32_t rc = NO_ERROR;
6338
6339 LOGD("Unblocking Process Capture Request");
6340 pthread_mutex_lock(&mMutex);
6341 mFlush = true;
6342 pthread_mutex_unlock(&mMutex);
6343
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006344    // Disable HDR+ if it's enabled.
6345 {
6346 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6347 finishHdrPlusClientOpeningLocked(l);
6348 disableHdrPlusModeLocked();
6349 }
6350
Thierry Strudel3d639192016-09-09 11:52:26 -07006351 rc = stopAllChannels();
6352 // unlink of dualcam
6353 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006354 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6355 &m_pDualCamCmdPtr->bundle_info;
6356 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006357 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6358 pthread_mutex_lock(&gCamLock);
6359
6360 if (mIsMainCamera == 1) {
6361 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6362 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006363 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006364 // related session id should be session id of linked session
6365 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6366 } else {
6367 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6368 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006369 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006370 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6371 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006372 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006373 pthread_mutex_unlock(&gCamLock);
6374
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006375 rc = mCameraHandle->ops->set_dual_cam_cmd(
6376 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006377 if (rc < 0) {
6378 LOGE("Dualcam: Unlink failed, but still proceed to close");
6379 }
6380 }
6381
6382 if (rc < 0) {
6383 LOGE("stopAllChannels failed");
6384 return rc;
6385 }
6386 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006387 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006388 }
6389
6390 // Reset bundle info
6391 rc = setBundleInfo();
6392 if (rc < 0) {
6393 LOGE("setBundleInfo failed %d", rc);
6394 return rc;
6395 }
6396
6397 // Mutex Lock
6398 pthread_mutex_lock(&mMutex);
6399
6400 // Unblock process_capture_request
6401 mPendingLiveRequest = 0;
6402 pthread_cond_signal(&mRequestCond);
6403
6404 rc = notifyErrorForPendingRequests();
6405 if (rc < 0) {
6406 LOGE("notifyErrorForPendingRequests failed");
6407 pthread_mutex_unlock(&mMutex);
6408 return rc;
6409 }
6410
6411 mFlush = false;
6412
6413 // Start the Streams/Channels
6414 if (restartChannels) {
6415 rc = startAllChannels();
6416 if (rc < 0) {
6417 LOGE("startAllChannels failed");
6418 pthread_mutex_unlock(&mMutex);
6419 return rc;
6420 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006421 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006422 // Configure modules for stream on.
6423 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006424 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006425 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006426 pthread_mutex_unlock(&mMutex);
6427 return rc;
6428 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006429 }
6430 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006431 pthread_mutex_unlock(&mMutex);
6432
6433 return 0;
6434}
6435
6436/*===========================================================================
6437 * FUNCTION : flushPerf
6438 *
 * DESCRIPTION: This is the performance-optimized version of flush: instead of
 *              streaming off the channels, it flushes the backend and waits
 *              for all pending buffers to be returned
6441 *
6442 * PARAMETERS :
6443 *
6444 *
6445 * RETURN : 0 : success
6446 * -EINVAL: input is malformed (device is not valid)
6447 * -ENODEV: if the device has encountered a serious error
6448 *==========================================================================*/
6449int QCamera3HardwareInterface::flushPerf()
6450{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006451 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 int32_t rc = 0;
6453 struct timespec timeout;
6454 bool timed_wait = false;
6455
6456 pthread_mutex_lock(&mMutex);
6457 mFlushPerf = true;
6458 mPendingBuffersMap.numPendingBufsAtFlush =
6459 mPendingBuffersMap.get_num_overall_buffers();
6460 LOGD("Calling flush. Wait for %d buffers to return",
6461 mPendingBuffersMap.numPendingBufsAtFlush);
6462
6463 /* send the flush event to the backend */
6464 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6465 if (rc < 0) {
6466 LOGE("Error in flush: IOCTL failure");
6467 mFlushPerf = false;
6468 pthread_mutex_unlock(&mMutex);
6469 return -ENODEV;
6470 }
6471
6472 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6473 LOGD("No pending buffers in HAL, return flush");
6474 mFlushPerf = false;
6475 pthread_mutex_unlock(&mMutex);
6476 return rc;
6477 }
6478
6479 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006480 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006481 if (rc < 0) {
        LOGE("Error reading the monotonic clock, cannot use timed wait");
6483 } else {
6484 timeout.tv_sec += FLUSH_TIMEOUT;
6485 timed_wait = true;
6486 }
6487
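    // Note: pthread_cond_timedwait() interprets `timeout` as an absolute
    // deadline on the clock the condition variable was initialized with; it is
    // assumed here that mBuffersCond uses CLOCK_MONOTONIC (presumably set up
    // via cam_cond.h) to match the clock_gettime() call above.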
6488 //Block on conditional variable
6489 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6490 LOGD("Waiting on mBuffersCond");
6491 if (!timed_wait) {
6492 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6493 if (rc != 0) {
6494 LOGE("pthread_cond_wait failed due to rc = %s",
6495 strerror(rc));
6496 break;
6497 }
6498 } else {
6499 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6500 if (rc != 0) {
6501 LOGE("pthread_cond_timedwait failed due to rc = %s",
6502 strerror(rc));
6503 break;
6504 }
6505 }
6506 }
6507 if (rc != 0) {
6508 mFlushPerf = false;
6509 pthread_mutex_unlock(&mMutex);
6510 return -ENODEV;
6511 }
6512
6513 LOGD("Received buffers, now safe to return them");
6514
6515 //make sure the channels handle flush
6516 //currently only required for the picture channel to release snapshot resources
6517 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6518 it != mStreamInfo.end(); it++) {
6519 QCamera3Channel *channel = (*it)->channel;
6520 if (channel) {
6521 rc = channel->flush();
6522 if (rc) {
6523 LOGE("Flushing the channels failed with error %d", rc);
                // Even though the channel flush failed, we need to continue
                // and return the buffers we have to the framework; however,
                // the return value will be an error
6527 rc = -ENODEV;
6528 }
6529 }
6530 }
6531
6532 /* notify the frameworks and send errored results */
6533 rc = notifyErrorForPendingRequests();
6534 if (rc < 0) {
6535 LOGE("notifyErrorForPendingRequests failed");
6536 pthread_mutex_unlock(&mMutex);
6537 return rc;
6538 }
6539
6540 //unblock process_capture_request
6541 mPendingLiveRequest = 0;
6542 unblockRequestIfNecessary();
6543
6544 mFlushPerf = false;
6545 pthread_mutex_unlock(&mMutex);
6546 LOGD ("Flush Operation complete. rc = %d", rc);
6547 return rc;
6548}
6549
6550/*===========================================================================
6551 * FUNCTION : handleCameraDeviceError
6552 *
 * DESCRIPTION: This function calls the internal flush, notifies the error to
 *              the framework, and updates the state variable.
6555 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006556 * PARAMETERS :
6557 * @stopChannelImmediately : stop channels immediately without waiting for
6558 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006559 *
6560 * RETURN : NO_ERROR on Success
6561 * Error code on failure
6562 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006563int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006564{
6565 int32_t rc = NO_ERROR;
6566
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006567 {
6568 Mutex::Autolock lock(mFlushLock);
6569 pthread_mutex_lock(&mMutex);
6570 if (mState != ERROR) {
6571 //if mState != ERROR, nothing to be done
6572 pthread_mutex_unlock(&mMutex);
6573 return NO_ERROR;
6574 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006575 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006576
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006577 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006578 if (NO_ERROR != rc) {
6579 LOGE("internal flush to handle mState = ERROR failed");
6580 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006581
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006582 pthread_mutex_lock(&mMutex);
6583 mState = DEINIT;
6584 pthread_mutex_unlock(&mMutex);
6585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006586
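    // Notify the framework of the fatal device error. For
    // CAMERA3_MSG_ERROR_DEVICE, the error_stream and frame_number fields are
    // not meaningful, so they are left as NULL/0.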
6587 camera3_notify_msg_t notify_msg;
6588 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6589 notify_msg.type = CAMERA3_MSG_ERROR;
6590 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6591 notify_msg.message.error.error_stream = NULL;
6592 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006593 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006594
6595 return rc;
6596}
6597
6598/*===========================================================================
6599 * FUNCTION : captureResultCb
6600 *
 * DESCRIPTION: Callback handler for all capture results
 *              (streams as well as metadata)
6603 *
6604 * PARAMETERS :
6605 * @metadata : metadata information
 * @buffer        : actual gralloc buffer to be returned to the framework.
 *                  NULL if metadata.
 * @frame_number  : frame number of the request this result belongs to
 * @isInputBuffer : true if this callback is for the input buffer of a
 *                  reprocess request
6608 *
6609 * RETURN : NONE
6610 *==========================================================================*/
6611void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6612 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6613{
6614 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006615 pthread_mutex_lock(&mMutex);
6616 uint8_t batchSize = mBatchSize;
6617 pthread_mutex_unlock(&mMutex);
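        // Snapshot mBatchSize under mMutex and drop the lock before dispatching:
        // handleBatchMetadata() is expected to take mMutex internally, while
        // handleMetadataWithLock() below requires the caller to hold it.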
6618 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006619 handleBatchMetadata(metadata_buf,
6620 true /* free_and_bufdone_meta_buf */);
6621 } else { /* mBatchSize = 0 */
6622 hdrPlusPerfLock(metadata_buf);
6623 pthread_mutex_lock(&mMutex);
6624 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006625 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006626 true /* last urgent frame of batch metadata */,
6627 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006628 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006629 pthread_mutex_unlock(&mMutex);
6630 }
6631 } else if (isInputBuffer) {
6632 pthread_mutex_lock(&mMutex);
6633 handleInputBufferWithLock(frame_number);
6634 pthread_mutex_unlock(&mMutex);
6635 } else {
6636 pthread_mutex_lock(&mMutex);
6637 handleBufferWithLock(buffer, frame_number);
6638 pthread_mutex_unlock(&mMutex);
6639 }
6640 return;
6641}
6642
6643/*===========================================================================
6644 * FUNCTION : getReprocessibleOutputStreamId
6645 *
 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
 *              matched by size and format; this would be the largest output
 *              stream if an input stream exists.
6649 *
6650 * PARAMETERS :
6651 * @id : return the stream id if found
6652 *
6653 * RETURN : int32_t type of status
6654 * NO_ERROR -- success
 *              non-zero failure code
6656 *==========================================================================*/
6657int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6658{
    /* Check if there is any output or bidirectional stream with the same size
       and format as the input stream, and return that stream */
6661 if ((mInputStreamInfo.dim.width > 0) &&
6662 (mInputStreamInfo.dim.height > 0)) {
6663 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6664 it != mStreamInfo.end(); it++) {
6665
6666 camera3_stream_t *stream = (*it)->stream;
6667 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6668 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6669 (stream->format == mInputStreamInfo.format)) {
6670 // Usage flag for an input stream and the source output stream
6671 // may be different.
6672 LOGD("Found reprocessible output stream! %p", *it);
6673 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6674 stream->usage, mInputStreamInfo.usage);
6675
6676 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6677 if (channel != NULL && channel->mStreams[0]) {
6678 id = channel->mStreams[0]->getMyServerID();
6679 return NO_ERROR;
6680 }
6681 }
6682 }
6683 } else {
6684 LOGD("No input stream, so no reprocessible output stream");
6685 }
6686 return NAME_NOT_FOUND;
6687}
6688
6689/*===========================================================================
6690 * FUNCTION : lookupFwkName
6691 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
6694 *
6695 * PARAMETERS :
6696 * @arr : map between the two enums
6697 * @len : len of the map
6698 * @hal_name : name of the hal_parm to map
6699 *
6700 * RETURN : int type of status
6701 * fwk_name -- success
 *              non-zero failure code
6703 *==========================================================================*/
6704template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6705 size_t len, halType hal_name)
6706{
6707
6708 for (size_t i = 0; i < len; i++) {
6709 if (arr[i].hal_name == hal_name) {
6710 return arr[i].fwk_name;
6711 }
6712 }
6713
    /* Not being able to find a matching framework type is not necessarily
     * an error. This happens when mm-camera supports more attributes
     * than the framework does */
6717 LOGH("Cannot find matching framework type");
6718 return NAME_NOT_FOUND;
6719}
6720
6721/*===========================================================================
6722 * FUNCTION : lookupHalName
6723 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
6726 *
6727 * PARAMETERS :
6728 * @arr : map between the two enums
6729 * @len : len of the map
 * @fwk_name : name of the fwk_parm to map
6731 *
6732 * RETURN : int32_t type of status
6733 * hal_name -- success
 *              non-zero failure code
6735 *==========================================================================*/
6736template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6737 size_t len, fwkType fwk_name)
6738{
6739 for (size_t i = 0; i < len; i++) {
6740 if (arr[i].fwk_name == fwk_name) {
6741 return arr[i].hal_name;
6742 }
6743 }
6744
6745 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6746 return NAME_NOT_FOUND;
6747}
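
/*===========================================================================
 * Illustrative sketch (not part of the original HAL code and not called from
 * anywhere): shows how the lookupFwkName()/lookupHalName() template helpers
 * are meant to be paired with a framework<->HAL enum map. The SampleMap type
 * and the sample values below are hypothetical and exist only for this
 * example.
 *==========================================================================*/
namespace {

struct SampleMap {
    int fwk_name;   // framework-side enum value
    int hal_name;   // backend/HAL-side enum value
};

const SampleMap kSampleFlashMap[] = {
    { 0 /* fwk: OFF    */, 10 /* hal: OFF    */ },
    { 1 /* fwk: SINGLE */, 11 /* hal: SINGLE */ },
};

inline void sampleLookupUsage()
{
    // Framework -> HAL direction (e.g. when translating a capture request)
    int hal = lookupHalName(kSampleFlashMap,
            sizeof(kSampleFlashMap) / sizeof(kSampleFlashMap[0]),
            1 /* fwk: SINGLE */);               // yields 11
    // HAL -> framework direction (e.g. when translating a capture result)
    int fwk = lookupFwkName(kSampleFlashMap,
            sizeof(kSampleFlashMap) / sizeof(kSampleFlashMap[0]),
            hal);                               // yields 1
    (void)fwk;
}

} // anonymous namespace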
6748
6749/*===========================================================================
6750 * FUNCTION : lookupProp
6751 *
6752 * DESCRIPTION: lookup a value by its name
6753 *
6754 * PARAMETERS :
6755 * @arr : map between the two enums
6756 * @len : size of the map
6757 * @name : name to be looked up
6758 *
6759 * RETURN : Value if found
6760 * CAM_CDS_MODE_MAX if not found
6761 *==========================================================================*/
6762template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6763 size_t len, const char *name)
6764{
6765 if (name) {
6766 for (size_t i = 0; i < len; i++) {
6767 if (!strcmp(arr[i].desc, name)) {
6768 return arr[i].val;
6769 }
6770 }
6771 }
6772 return CAM_CDS_MODE_MAX;
6773}
6774
6775/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata reported by the HAL/backend into the
 *              camera_metadata_t format expected by the framework
 *
6779 * PARAMETERS :
6780 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006781 * @pendingRequest: pending request for this metadata
 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006783 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                       in a batch. Always true for non-batch mode.
 * @enableZsl: if non-NULL, whether ZSL is enabled for this request
 *
6786 * RETURN : camera_metadata_t*
6787 * metadata in a format specified by fwk
6788 *==========================================================================*/
6789camera_metadata_t*
6790QCamera3HardwareInterface::translateFromHalMetadata(
6791 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006792 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006793 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006794 bool lastMetadataInBatch,
6795 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006796{
6797 CameraMetadata camMetadata;
6798 camera_metadata_t *resultMetadata;
6799
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006800 if (!lastMetadataInBatch) {
        /* In batch mode, if this is not the last metadata in the batch, only
         * populate SENSOR_TIMESTAMP. The timestamp is needed because it is
         * used for the shutter notify calculation. */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006804 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006805 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006806 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006807 }
6808
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006809 if (pendingRequest.jpegMetadata.entryCount())
6810 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006811
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006812 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6813 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6814 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6815 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6816 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006817 if (mBatchSize == 0) {
6818 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006819 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006821
Samuel Ha68ba5172016-12-15 18:41:12 -08006822 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
    // Only update DevCamDebug metadata conditionally: non-HFR mode and only
    // when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006824 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006825 // DevCamDebug metadata translateFromHalMetadata AF
6826 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6827 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6828 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6829 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006832 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006833 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6834 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006837 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006838 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6839 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6842 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6843 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6844 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6847 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6848 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6849 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6852 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6853 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6854 *DevCamDebug_af_monitor_pdaf_target_pos;
6855 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6856 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6857 }
6858 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6859 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6860 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6861 *DevCamDebug_af_monitor_pdaf_confidence;
6862 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6863 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6864 }
6865 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6866 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6867 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6868 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6869 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6870 }
6871 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6872 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6873 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6874 *DevCamDebug_af_monitor_tof_target_pos;
6875 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6876 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6879 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6880 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6881 *DevCamDebug_af_monitor_tof_confidence;
6882 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6883 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6884 }
6885 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6886 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6887 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6888 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6889 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6890 }
6891 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6892 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6893 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6894 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6895 &fwk_DevCamDebug_af_monitor_type_select, 1);
6896 }
6897 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6898 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6899 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6900 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6901 &fwk_DevCamDebug_af_monitor_refocus, 1);
6902 }
6903 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6904 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6905 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6906 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6907 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6908 }
6909 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6910 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6911 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6912 *DevCamDebug_af_search_pdaf_target_pos;
6913 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6914 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6915 }
6916 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6917 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6918 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6919 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6920 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6921 }
6922 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6923 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6924 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6925 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6926 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6927 }
6928 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6929 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6930 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6931 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6932 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6933 }
6934 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6935 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6936 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6937 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6938 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6939 }
6940 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6941 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6942 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6943 *DevCamDebug_af_search_tof_target_pos;
6944 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6945 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6946 }
6947 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6948 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6949 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6950 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6951 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6952 }
6953 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6954 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6955 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6956 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6957 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6958 }
6959 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6960 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6961 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6962 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6963 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6964 }
6965 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6966 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6967 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6968 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6969 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6970 }
6971 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6972 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6973 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6974 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6975 &fwk_DevCamDebug_af_search_type_select, 1);
6976 }
6977 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6978 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6979 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6980 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6981 &fwk_DevCamDebug_af_search_next_pos, 1);
6982 }
6983 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6984 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6985 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6986 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6987 &fwk_DevCamDebug_af_search_target_pos, 1);
6988 }
6989 // DevCamDebug metadata translateFromHalMetadata AEC
6990 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6991 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6992 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6993 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6994 }
6995 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6996 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6997 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6998 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6999 }
7000 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7001 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7002 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7003 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7004 }
7005 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7006 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7007 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7008 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7009 }
7010 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7011 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7012 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7013 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7014 }
7015 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7016 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7017 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7018 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7019 }
7020 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7021 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7022 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7023 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7024 }
7025 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7026 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7027 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7028 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7029 }
Samuel Ha34229982017-02-17 13:51:11 -08007030 // DevCamDebug metadata translateFromHalMetadata zzHDR
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7032 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7033 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7034 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7035 }
7036 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7037 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007038 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007039 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7040 }
7041 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7042 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7043 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7044 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7045 }
7046 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7047 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007048 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007049 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7050 }
7051 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7052 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7053 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7054 *DevCamDebug_aec_hdr_sensitivity_ratio;
7055 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7056 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7059 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7060 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7061 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7062 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7063 }
7064 // DevCamDebug metadata translateFromHalMetadata ADRC
7065 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7066 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7067 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7068 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7069 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7070 }
7071 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7072 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7073 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7074 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7075 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7076 }
7077 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7078 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7079 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7080 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7081 }
7082 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7083 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7084 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7085 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7086 }
7087 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7088 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7089 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7090 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7091 }
7092 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7093 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7094 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7095 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7096 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007097 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7098 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7099 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7100 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7101 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7102 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7103 }
7104 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7105 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7106 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7107 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7108 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7109 }
7110 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7111 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7112 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7113 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7114 &fwk_DevCamDebug_aec_subject_motion, 1);
7115 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007116 // DevCamDebug metadata translateFromHalMetadata AWB
7117 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7118 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7119 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7120 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7121 }
7122 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7123 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7124 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7125 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7126 }
7127 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7128 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7129 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7130 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7131 }
7132 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7133 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7134 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7135 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7136 }
7137 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7138 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7139 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7140 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7141 }
7142 }
7143 // atrace_end(ATRACE_TAG_ALWAYS);
7144
Thierry Strudel3d639192016-09-09 11:52:26 -07007145 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7146 int64_t fwk_frame_number = *frame_number;
7147 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7148 }
7149
7150 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7151 int32_t fps_range[2];
7152 fps_range[0] = (int32_t)float_range->min_fps;
7153 fps_range[1] = (int32_t)float_range->max_fps;
7154 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7155 fps_range, 2);
7156 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7157 fps_range[0], fps_range[1]);
7158 }
7159
7160 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7161 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7162 }
7163
7164 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7165 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7166 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7167 *sceneMode);
7168 if (NAME_NOT_FOUND != val) {
7169 uint8_t fwkSceneMode = (uint8_t)val;
7170 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7171 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7172 fwkSceneMode);
7173 }
7174 }
7175
7176 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7177 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7178 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7179 }
7180
7181 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7182 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7183 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7184 }
7185
7186 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7187 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7188 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7189 }
7190
7191 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7192 CAM_INTF_META_EDGE_MODE, metadata) {
7193 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7194 }
7195
7196 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7197 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7198 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7199 }
7200
7201 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7202 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7203 }
7204
7205 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7206 if (0 <= *flashState) {
7207 uint8_t fwk_flashState = (uint8_t) *flashState;
7208 if (!gCamCapability[mCameraId]->flash_available) {
7209 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7210 }
7211 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7212 }
7213 }
7214
7215 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7216 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7217 if (NAME_NOT_FOUND != val) {
7218 uint8_t fwk_flashMode = (uint8_t)val;
7219 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7220 }
7221 }
7222
7223 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7224 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7225 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7226 }
7227
7228 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7229 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7230 }
7231
7232 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7233 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7234 }
7235
7236 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7237 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7238 }
7239
7240 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7241 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7242 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7243 }
7244
7245 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7246 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7247 LOGD("fwk_videoStab = %d", fwk_videoStab);
7248 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7249 } else {
        // Regardless of whether video stabilization is supported or not, CTS
        // expects the EIS result to be non-NULL, so hardcode the video
        // stabilization result to OFF mode.
7252 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7253 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007254 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007255 }
7256
7257 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7258 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7259 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7260 }
7261
7262 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7263 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7264 }
7265
Thierry Strudel3d639192016-09-09 11:52:26 -07007266 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7267 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007268 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007269
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007270 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7271 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007272
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007273 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 blackLevelAppliedPattern->cam_black_level[0],
7275 blackLevelAppliedPattern->cam_black_level[1],
7276 blackLevelAppliedPattern->cam_black_level[2],
7277 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007278 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7279 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007280
7281#ifndef USE_HAL_3_3
7282 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
        // depth space.
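        // For example (hypothetical values): an applied black level of 1024 in
        // the 14-bit domain corresponds to 1024 / 16 = 64 in the 10-bit domain.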
Jason Lee4f3d96e2017-02-28 19:24:14 +05307285 fwk_blackLevelInd[0] /= 16.0;
7286 fwk_blackLevelInd[1] /= 16.0;
7287 fwk_blackLevelInd[2] /= 16.0;
7288 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007289 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7290 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007291#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007292 }
7293
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007294#ifndef USE_HAL_3_3
7295 // Fixed whitelevel is used by ISP/Sensor
7296 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7297 &gCamCapability[mCameraId]->white_level, 1);
7298#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007299
7300 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7301 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7302 int32_t scalerCropRegion[4];
7303 scalerCropRegion[0] = hScalerCropRegion->left;
7304 scalerCropRegion[1] = hScalerCropRegion->top;
7305 scalerCropRegion[2] = hScalerCropRegion->width;
7306 scalerCropRegion[3] = hScalerCropRegion->height;
7307
7308 // Adjust crop region from sensor output coordinate system to active
7309 // array coordinate system.
7310 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7311 scalerCropRegion[2], scalerCropRegion[3]);
7312
7313 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7314 }
7315
7316 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7317 LOGD("sensorExpTime = %lld", *sensorExpTime);
7318 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7319 }
7320
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007321 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7322 LOGD("expTimeBoost = %f", *expTimeBoost);
7323 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7324 }
7325
Thierry Strudel3d639192016-09-09 11:52:26 -07007326 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7327 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7328 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7329 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7330 }
7331
7332 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7333 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7334 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7335 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7336 sensorRollingShutterSkew, 1);
7337 }
7338
7339 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7340 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7341 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7342
7343 //calculate the noise profile based on sensitivity
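        // ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O) pair per CFA
        // color channel; the Android noise model is N(x) = sqrt(S * x + O),
        // hence the interleaved layout below.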
7344 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7345 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7346 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7347 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7348 noise_profile[i] = noise_profile_S;
7349 noise_profile[i+1] = noise_profile_O;
7350 }
7351 LOGD("noise model entry (S, O) is (%f, %f)",
7352 noise_profile_S, noise_profile_O);
7353 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7354 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7355 }
7356
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357#ifndef USE_HAL_3_3
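    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in units where
    // 100 means no additional boost (1x); the ISP sensitivity is combined with
    // the post-stats sensitivity factor below before it is reported.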
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007358 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007359 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007360 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007361 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007362 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7363 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7364 }
7365 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007366#endif
7367
Thierry Strudel3d639192016-09-09 11:52:26 -07007368 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7369 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7370 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7371 }
7372
7373 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7374 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7375 *faceDetectMode);
7376 if (NAME_NOT_FOUND != val) {
7377 uint8_t fwk_faceDetectMode = (uint8_t)val;
7378 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7379
7380 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7381 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7382 CAM_INTF_META_FACE_DETECTION, metadata) {
7383 uint8_t numFaces = MIN(
7384 faceDetectionInfo->num_faces_detected, MAX_ROI);
7385 int32_t faceIds[MAX_ROI];
7386 uint8_t faceScores[MAX_ROI];
7387 int32_t faceRectangles[MAX_ROI * 4];
7388 int32_t faceLandmarks[MAX_ROI * 6];
7389 size_t j = 0, k = 0;
7390
7391 for (size_t i = 0; i < numFaces; i++) {
7392 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                    // Map the face boundary from the sensor output coordinate
                    // system to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007395 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007396 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7397 rect.width, rect.height);
7398
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007400
Jason Lee8ce36fa2017-04-19 19:40:37 -07007401 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7402 "bottom-right (%d, %d)",
7403 faceDetectionInfo->frame_id, i,
7404 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7405 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7406
Thierry Strudel3d639192016-09-09 11:52:26 -07007407 j+= 4;
7408 }
7409 if (numFaces <= 0) {
7410 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7411 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7412 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7413 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7414 }
7415
7416 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7417 numFaces);
7418 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7419 faceRectangles, numFaces * 4U);
7420 if (fwk_faceDetectMode ==
7421 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7422 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7423 CAM_INTF_META_FACE_LANDMARK, metadata) {
7424
7425 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007426 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                            // Map the landmark coordinates from the sensor
                            // output coordinate system to the active array
                            // coordinate system.
7429 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007430 face_landmarks.left_eye_center.x,
7431 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007432 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007433 face_landmarks.right_eye_center.x,
7434 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007435 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007436 face_landmarks.mouth_center.x,
7437 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007438
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007439 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007440
7441 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7442 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7443 faceDetectionInfo->frame_id, i,
7444 faceLandmarks[k + LEFT_EYE_X],
7445 faceLandmarks[k + LEFT_EYE_Y],
7446 faceLandmarks[k + RIGHT_EYE_X],
7447 faceLandmarks[k + RIGHT_EYE_Y],
7448 faceLandmarks[k + MOUTH_X],
7449 faceLandmarks[k + MOUTH_Y]);
7450
Thierry Strudel04e026f2016-10-10 11:27:36 -07007451 k+= TOTAL_LANDMARK_INDICES;
7452 }
7453 } else {
7454 for (size_t i = 0; i < numFaces; i++) {
7455 setInvalidLandmarks(faceLandmarks+k);
7456 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007457 }
7458 }
7459
Jason Lee49619db2017-04-13 12:07:22 -07007460 for (size_t i = 0; i < numFaces; i++) {
7461 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7462
7463 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7464 faceDetectionInfo->frame_id, i, faceIds[i]);
7465 }
7466
Thierry Strudel3d639192016-09-09 11:52:26 -07007467 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7468 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7469 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007470 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007471 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7472 CAM_INTF_META_FACE_BLINK, metadata) {
7473 uint8_t detected[MAX_ROI];
7474 uint8_t degree[MAX_ROI * 2];
7475 for (size_t i = 0; i < numFaces; i++) {
7476 detected[i] = blinks->blink[i].blink_detected;
7477 degree[2 * i] = blinks->blink[i].left_blink;
7478 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007479
Jason Lee49619db2017-04-13 12:07:22 -07007480 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7481 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7482 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7483 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007484 }
7485 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7486 detected, numFaces);
7487 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7488 degree, numFaces * 2);
7489 }
7490 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7491 CAM_INTF_META_FACE_SMILE, metadata) {
7492 uint8_t degree[MAX_ROI];
7493 uint8_t confidence[MAX_ROI];
7494 for (size_t i = 0; i < numFaces; i++) {
7495 degree[i] = smiles->smile[i].smile_degree;
7496 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007497
Jason Lee49619db2017-04-13 12:07:22 -07007498 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7499 "smile_degree=%d, smile_score=%d",
7500 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007501 }
7502 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7503 degree, numFaces);
7504 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7505 confidence, numFaces);
7506 }
7507 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7508 CAM_INTF_META_FACE_GAZE, metadata) {
7509 int8_t angle[MAX_ROI];
7510 int32_t direction[MAX_ROI * 3];
7511 int8_t degree[MAX_ROI * 2];
7512 for (size_t i = 0; i < numFaces; i++) {
7513 angle[i] = gazes->gaze[i].gaze_angle;
7514 direction[3 * i] = gazes->gaze[i].updown_dir;
7515 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7516 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7517 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7518 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007519
7520 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7521 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7522 "left_right_gaze=%d, top_bottom_gaze=%d",
7523 faceDetectionInfo->frame_id, i, angle[i],
7524 direction[3 * i], direction[3 * i + 1],
7525 direction[3 * i + 2],
7526 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007527 }
7528 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7529 (uint8_t *)angle, numFaces);
7530 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7531 direction, numFaces * 3);
7532 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7533 (uint8_t *)degree, numFaces * 2);
7534 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007535 }
7536 }
7537 }
7538 }
7539
7540 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7541 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007542 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007543 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007544 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007545
Shuzhen Wang14415f52016-11-16 18:26:18 -08007546 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7547 histogramBins = *histBins;
7548 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7549 }
7550
7551 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007552 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7553 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007554 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007555
7556 switch (stats_data->type) {
7557 case CAM_HISTOGRAM_TYPE_BAYER:
7558 switch (stats_data->bayer_stats.data_type) {
7559 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007560 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7561 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007562 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007563 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7564 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007565 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007566 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7567 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007568 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007569 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007570 case CAM_STATS_CHANNEL_R:
7571 default:
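                    // Y, ALL, R and any unhandled channel type all fall through
                    // to the R-channel histogram buffer.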
Shuzhen Wang14415f52016-11-16 18:26:18 -08007572 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7573 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007574 }
7575 break;
7576 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007577 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007578 break;
7579 }
7580
Shuzhen Wang14415f52016-11-16 18:26:18 -08007581 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007582 }
7583 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007584 }
7585
7586 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7587 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7588 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7589 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7590 }
7591
7592 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7593 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7594 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7595 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7596 }
7597
7598 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7599 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7600 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7601 CAM_MAX_SHADING_MAP_HEIGHT);
7602 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7603 CAM_MAX_SHADING_MAP_WIDTH);
7604 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7605 lensShadingMap->lens_shading, 4U * map_width * map_height);
7606 }
7607
7608 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7609 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7610 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7611 }
7612
7613 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7614 // Populate CAM_INTF_META_TONEMAP_CURVES
7615 /* ch0 = G, ch1 = B, ch2 = R */
7616 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7617 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7618 tonemap->tonemap_points_cnt,
7619 CAM_MAX_TONEMAP_CURVE_SIZE);
7620 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7621 }
7622
7623 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7624 &tonemap->curves[0].tonemap_points[0][0],
7625 tonemap->tonemap_points_cnt * 2);
7626
7627 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7628 &tonemap->curves[1].tonemap_points[0][0],
7629 tonemap->tonemap_points_cnt * 2);
7630
7631 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7632 &tonemap->curves[2].tonemap_points[0][0],
7633 tonemap->tonemap_points_cnt * 2);
7634 }
7635
7636 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7637 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7638 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7639 CC_GAIN_MAX);
7640 }
7641
7642 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7643 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7644 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7645 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7646 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7647 }
7648
7649 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7650 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7651 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7652 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7653 toneCurve->tonemap_points_cnt,
7654 CAM_MAX_TONEMAP_CURVE_SIZE);
7655 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7656 }
7657 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7658 (float*)toneCurve->curve.tonemap_points,
7659 toneCurve->tonemap_points_cnt * 2);
7660 }
7661
7662 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7663 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7664 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7665 predColorCorrectionGains->gains, 4);
7666 }
7667
7668 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7669 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7670 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7671 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7672 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7673 }
7674
7675 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7676 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7677 }
7678
7679 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7680 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7681 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7682 }
7683
7684 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7685 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7686 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7687 }
7688
7689 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7690 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7691 *effectMode);
7692 if (NAME_NOT_FOUND != val) {
7693 uint8_t fwk_effectMode = (uint8_t)val;
7694 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7695 }
7696 }
7697
7698 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7699 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7700 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7701 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7702 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7703 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7704 }
7705 int32_t fwk_testPatternData[4];
7706 fwk_testPatternData[0] = testPatternData->r;
7707 fwk_testPatternData[3] = testPatternData->b;
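        // Map the green sub-channels according to the sensor CFA layout: for RGGB/GRBG
        // the Gr value goes to index 1 and Gb to index 2; for GBRG/BGGR they are swapped.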
7708 switch (gCamCapability[mCameraId]->color_arrangement) {
7709 case CAM_FILTER_ARRANGEMENT_RGGB:
7710 case CAM_FILTER_ARRANGEMENT_GRBG:
7711 fwk_testPatternData[1] = testPatternData->gr;
7712 fwk_testPatternData[2] = testPatternData->gb;
7713 break;
7714 case CAM_FILTER_ARRANGEMENT_GBRG:
7715 case CAM_FILTER_ARRANGEMENT_BGGR:
7716 fwk_testPatternData[2] = testPatternData->gr;
7717 fwk_testPatternData[1] = testPatternData->gb;
7718 break;
7719 default:
7720 LOGE("color arrangement %d is not supported",
7721 gCamCapability[mCameraId]->color_arrangement);
7722 break;
7723 }
7724 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7725 }
7726
7727 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7728 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7729 }
7730
7731 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7732 String8 str((const char *)gps_methods);
7733 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7734 }
7735
7736 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7737 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7738 }
7739
7740 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7741 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7742 }
7743
7744 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7745 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7746 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7747 }
7748
7749 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7750 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7751 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7752 }
7753
7754 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7755 int32_t fwk_thumb_size[2];
7756 fwk_thumb_size[0] = thumb_size->width;
7757 fwk_thumb_size[1] = thumb_size->height;
7758 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7759 }
7760
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007761 // Skip reprocess metadata if there is no input stream.
7762 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7763 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7764 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7765 privateData,
7766 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7767 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007768 }
7769
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007770 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007771 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007772 meteringMode, 1);
7773 }
7774
Thierry Strudel54dc9782017-02-15 12:12:10 -08007775 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7776 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7777 LOGD("hdr_scene_data: %d %f\n",
7778 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7779 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7780 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7781 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7782 &isHdr, 1);
7783 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7784 &isHdrConfidence, 1);
7785 }
7786
7787
7788
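    // The tuning blob below is packed as six uint32 headers (data version and the
    // sensor/VFE/CPP/CAC/mod3 sizes) followed by the variable-length sensor, VFE,
    // CPP and CAC payloads, mirroring the file layout written by dumpMetadataToFile().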
Thierry Strudel3d639192016-09-09 11:52:26 -07007789 if (metadata->is_tuning_params_valid) {
7790 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7791 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7792 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7793
7794
7795 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7796 sizeof(uint32_t));
7797 data += sizeof(uint32_t);
7798
7799 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7800 sizeof(uint32_t));
7801 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7802 data += sizeof(uint32_t);
7803
7804 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7805 sizeof(uint32_t));
7806 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7807 data += sizeof(uint32_t);
7808
7809 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7810 sizeof(uint32_t));
7811 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7812 data += sizeof(uint32_t);
7813
7814 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7815 sizeof(uint32_t));
7816 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7817 data += sizeof(uint32_t);
7818
7819 metadata->tuning_params.tuning_mod3_data_size = 0;
7820 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7821 sizeof(uint32_t));
7822 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7823 data += sizeof(uint32_t);
7824
7825 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7826 TUNING_SENSOR_DATA_MAX);
7827 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7828 count);
7829 data += count;
7830
7831 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7832 TUNING_VFE_DATA_MAX);
7833 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7834 count);
7835 data += count;
7836
7837 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7838 TUNING_CPP_DATA_MAX);
7839 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7840 count);
7841 data += count;
7842
7843 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7844 TUNING_CAC_DATA_MAX);
7845 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7846 count);
7847 data += count;
7848
7849 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7850 (int32_t *)(void *)tuning_meta_data_blob,
7851 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7852 }
7853
7854 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7855 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7856 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7857 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7858 NEUTRAL_COL_POINTS);
7859 }
7860
7861 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7862 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7863 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7864 }
7865
7866 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7867 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7868 // Adjust the AE region from the sensor output coordinate system to the
7869 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007870 cam_rect_t hAeRect = hAeRegions->rect;
7871 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7872 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007873
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007874 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007875 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7876 REGIONS_TUPLE_COUNT);
7877 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7878 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007879 hAeRect.left, hAeRect.top, hAeRect.width,
7880 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 }
7882
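    // The AF state may already have been delivered as an early partial result
    // (focusStateSent). Otherwise report the cached state from the urgent path,
    // or fall back to the AF state carried in this metadata buffer.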
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007883 if (!pendingRequest.focusStateSent) {
7884 if (pendingRequest.focusStateValid) {
7885 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7886 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007887 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007888 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7889 uint8_t fwk_afState = (uint8_t) *afState;
7890 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7891 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7892 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007893 }
7894 }
7895
Thierry Strudel3d639192016-09-09 11:52:26 -07007896 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7897 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7898 }
7899
7900 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7901 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7902 }
7903
7904 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7905 uint8_t fwk_lensState = *lensState;
7906 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7907 }
7908
Thierry Strudel3d639192016-09-09 11:52:26 -07007909 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007910 uint32_t ab_mode = *hal_ab_mode;
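        // Collapse the HAL's 50Hz/60Hz auto sub-modes into plain AUTO before the
        // framework enum lookup, since the framework only exposes a single AUTO value.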
7911 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7912 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7913 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7914 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007915 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007916 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007917 if (NAME_NOT_FOUND != val) {
7918 uint8_t fwk_ab_mode = (uint8_t)val;
7919 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7920 }
7921 }
7922
7923 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7924 int val = lookupFwkName(SCENE_MODES_MAP,
7925 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7926 if (NAME_NOT_FOUND != val) {
7927 uint8_t fwkBestshotMode = (uint8_t)val;
7928 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7929 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7930 } else {
7931 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7932 }
7933 }
7934
7935 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7936 uint8_t fwk_mode = (uint8_t) *mode;
7937 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7938 }
7939
7940 /* Constant metadata values to be updated */
7941 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7942 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7943
7944 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7945 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7946
7947 int32_t hotPixelMap[2];
7948 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7949
7950 // CDS
7951 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7952 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7953 }
7954
Thierry Strudel04e026f2016-10-10 11:27:36 -07007955 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
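        // Track on/off transitions of staggered video HDR in mCurrFeatureState so that
        // toggles can be spotted via the PROFILE_META_HDR_TOGGLED log message.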
7956 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007957 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007958 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7959 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7960 } else {
7961 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7962 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007963
7964 if(fwk_hdr != curr_hdr_state) {
7965 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7966 if(fwk_hdr)
7967 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7968 else
7969 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7970 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007971 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7972 }
7973
Thierry Strudel54dc9782017-02-15 12:12:10 -08007974 //binning correction
7975 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7976 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7977 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7978 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7979 }
7980
Thierry Strudel04e026f2016-10-10 11:27:36 -07007981 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007983 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7984 int8_t is_ir_on = 0;
7985
7986 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7987 if(is_ir_on != curr_ir_state) {
7988 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7989 if(is_ir_on)
7990 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7991 else
7992 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7993 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007995 }
7996
Thierry Strudel269c81a2016-10-12 12:13:59 -07007997 // AEC SPEED
7998 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7999 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
8000 }
8001
8002 // AWB SPEED
8003 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
8004 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
8005 }
8006
Thierry Strudel3d639192016-09-09 11:52:26 -07008007 // TNR
8008 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
8009 uint8_t tnr_enable = tnr->denoise_enable;
8010 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08008011 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
8012 int8_t is_tnr_on = 0;
8013
8014 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
8015 if(is_tnr_on != curr_tnr_state) {
8016 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
8017 if(is_tnr_on)
8018 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
8019 else
8020 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
8021 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008022
8023 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8024 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8025 }
8026
8027 // Reprocess crop data
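    // Only the crop entry that matches the reprocessible output stream is published.
    // If internal reprocessing already ran (pprocDone), the full input stream
    // dimensions are reported instead of the backend-provided crop.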
8028 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
8029 uint8_t cnt = crop_data->num_of_streams;
8030 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8031 // mm-qcamera-daemon only posts crop_data for streams
8032 // not linked to pproc, so the absence of valid crop
8033 // metadata is not necessarily an error case.
8034 LOGD("No valid crop metadata entries");
8035 } else {
8036 uint32_t reproc_stream_id;
8037 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8038 LOGD("No reprocessible stream found, ignore crop data");
8039 } else {
8040 int rc = NO_ERROR;
8041 Vector<int32_t> roi_map;
8042 int32_t *crop = new int32_t[cnt*4];
8043 if (NULL == crop) {
8044 rc = NO_MEMORY;
8045 }
8046 if (NO_ERROR == rc) {
8047 int32_t streams_found = 0;
8048 for (size_t i = 0; i < cnt; i++) {
8049 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8050 if (pprocDone) {
8051 // HAL already does internal reprocessing,
8052 // either via reprocessing before JPEG encoding,
8053 // or offline postprocessing for pproc bypass case.
8054 crop[0] = 0;
8055 crop[1] = 0;
8056 crop[2] = mInputStreamInfo.dim.width;
8057 crop[3] = mInputStreamInfo.dim.height;
8058 } else {
8059 crop[0] = crop_data->crop_info[i].crop.left;
8060 crop[1] = crop_data->crop_info[i].crop.top;
8061 crop[2] = crop_data->crop_info[i].crop.width;
8062 crop[3] = crop_data->crop_info[i].crop.height;
8063 }
8064 roi_map.add(crop_data->crop_info[i].roi_map.left);
8065 roi_map.add(crop_data->crop_info[i].roi_map.top);
8066 roi_map.add(crop_data->crop_info[i].roi_map.width);
8067 roi_map.add(crop_data->crop_info[i].roi_map.height);
8068 streams_found++;
8069 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8070 crop[0], crop[1], crop[2], crop[3]);
8071 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8072 crop_data->crop_info[i].roi_map.left,
8073 crop_data->crop_info[i].roi_map.top,
8074 crop_data->crop_info[i].roi_map.width,
8075 crop_data->crop_info[i].roi_map.height);
8076 break;
8077
8078 }
8079 }
8080 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8081 &streams_found, 1);
8082 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8083 crop, (size_t)(streams_found * 4));
8084 if (roi_map.array()) {
8085 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8086 roi_map.array(), roi_map.size());
8087 }
8088 }
8089 if (crop) {
8090 delete [] crop;
8091 }
8092 }
8093 }
8094 }
8095
8096 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8097 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
8098 // so hardcode the CAC result to OFF mode.
8099 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8100 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8101 } else {
8102 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8103 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8104 *cacMode);
8105 if (NAME_NOT_FOUND != val) {
8106 uint8_t resultCacMode = (uint8_t)val;
8107 // Check whether the CAC result from the callback matches the framework-set CAC mode.
8108 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008109 if (pendingRequest.fwkCacMode != resultCacMode) {
8110 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008111 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008112 //Check if CAC is disabled by property
8113 if (m_cacModeDisabled) {
8114 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8115 }
8116
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008117 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008118 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8119 } else {
8120 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8121 }
8122 }
8123 }
8124
8125 // Post blob of cam_cds_data through vendor tag.
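    // The override blob always advertises exactly one stream: the session-level CDS
    // enable plus the CDS enable of the reprocessible stream, if one exists.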
8126 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8127 uint8_t cnt = cdsInfo->num_of_streams;
8128 cam_cds_data_t cdsDataOverride;
8129 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8130 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8131 cdsDataOverride.num_of_streams = 1;
8132 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8133 uint32_t reproc_stream_id;
8134 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8135 LOGD("No reprocessible stream found, ignore cds data");
8136 } else {
8137 for (size_t i = 0; i < cnt; i++) {
8138 if (cdsInfo->cds_info[i].stream_id ==
8139 reproc_stream_id) {
8140 cdsDataOverride.cds_info[0].cds_enable =
8141 cdsInfo->cds_info[i].cds_enable;
8142 break;
8143 }
8144 }
8145 }
8146 } else {
8147 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8148 }
8149 camMetadata.update(QCAMERA3_CDS_INFO,
8150 (uint8_t *)&cdsDataOverride,
8151 sizeof(cam_cds_data_t));
8152 }
8153
8154 // Ldaf calibration data
8155 if (!mLdafCalibExist) {
8156 IF_META_AVAILABLE(uint32_t, ldafCalib,
8157 CAM_INTF_META_LDAF_EXIF, metadata) {
8158 mLdafCalibExist = true;
8159 mLdafCalib[0] = ldafCalib[0];
8160 mLdafCalib[1] = ldafCalib[1];
8161 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8162 ldafCalib[0], ldafCalib[1]);
8163 }
8164 }
8165
Thierry Strudel54dc9782017-02-15 12:12:10 -08008166 // EXIF debug data through vendor tag
8167 /*
8168 * Mobicat Mask can assume 3 values:
8169 * 1 refers to Mobicat data,
8170 * 2 refers to Stats Debug and Exif Debug Data
8171 * 3 refers to Mobicat and Stats Debug Data
8172 * We want to make sure that we are sending Exif debug data
8173 * only when Mobicat Mask is 2.
8174 */
8175 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8176 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8177 (uint8_t *)(void *)mExifParams.debug_params,
8178 sizeof(mm_jpeg_debug_exif_params_t));
8179 }
8180
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008181 // Reprocess and DDM debug data through vendor tag
8182 cam_reprocess_info_t repro_info;
8183 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008184 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8185 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008186 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008187 }
8188 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8189 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008190 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008191 }
8192 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8193 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008194 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008195 }
8196 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8197 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008198 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008199 }
8200 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8201 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008202 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008203 }
8204 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008205 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008206 }
8207 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8208 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008209 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008210 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008211 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8212 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8213 }
8214 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8215 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8216 }
8217 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8218 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008219
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008220 // INSTANT AEC MODE
8221 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8222 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8223 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8224 }
8225
Shuzhen Wange763e802016-03-31 10:24:29 -07008226 // AF scene change
8227 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8228 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8229 }
8230
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008231 // Enable ZSL
8232 if (enableZsl != nullptr) {
8233 uint8_t value = *enableZsl ?
8234 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8235 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8236 }
8237
Xu Han821ea9c2017-05-23 09:00:40 -07008238 // OIS Data
8239 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
Xu Han821ea9c2017-05-23 09:00:40 -07008240 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8241 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8242 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8243 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008244 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8245 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8246 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8247 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008248 }
8249
Thierry Strudel3d639192016-09-09 11:52:26 -07008250 resultMetadata = camMetadata.release();
8251 return resultMetadata;
8252}
8253
8254/*===========================================================================
8255 * FUNCTION : saveExifParams
8256 *
8257 * DESCRIPTION: caches the 3A/stats EXIF debug parameters from the metadata buffer into mExifParams
8258 *
8259 * PARAMETERS :
8260 * @metadata : metadata information from callback
8261 *
8262 * RETURN : none
8263 *
8264 *==========================================================================*/
8265void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8266{
8267 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8268 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8269 if (mExifParams.debug_params) {
8270 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8271 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8272 }
8273 }
8274 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8275 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8276 if (mExifParams.debug_params) {
8277 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8278 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8279 }
8280 }
8281 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8282 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8283 if (mExifParams.debug_params) {
8284 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8285 mExifParams.debug_params->af_debug_params_valid = TRUE;
8286 }
8287 }
8288 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8289 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8290 if (mExifParams.debug_params) {
8291 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8292 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8293 }
8294 }
8295 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8296 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8297 if (mExifParams.debug_params) {
8298 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8299 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8300 }
8301 }
8302 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8303 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8304 if (mExifParams.debug_params) {
8305 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8306 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8307 }
8308 }
8309 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8310 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8311 if (mExifParams.debug_params) {
8312 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8313 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8314 }
8315 }
8316 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8317 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8318 if (mExifParams.debug_params) {
8319 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8320 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8321 }
8322 }
8323}
8324
8325/*===========================================================================
8326 * FUNCTION : get3AExifParams
8327 *
8328 * DESCRIPTION: returns the cached EXIF parameters (mExifParams)
8329 *
8330 * PARAMETERS : none
8331 *
8332 *
8333 * RETURN : mm_jpeg_exif_params_t
8334 *
8335 *==========================================================================*/
8336mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8337{
8338 return mExifParams;
8339}
8340
8341/*===========================================================================
8342 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8343 *
8344 * DESCRIPTION: translates urgent (partial-result) metadata from the HAL backend into
8345 *              framework result metadata
8345 *
8346 * PARAMETERS :
8347 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008348 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8349 * urgent metadata in a batch. Always true for
8350 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008351 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008352 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8353 * i.e. even though it doesn't map to a valid partial
8354 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008355 * RETURN : camera_metadata_t*
8356 * metadata in a format specified by fwk
8357 *==========================================================================*/
8358camera_metadata_t*
8359QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008360 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008361 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008362{
8363 CameraMetadata camMetadata;
8364 camera_metadata_t *resultMetadata;
8365
Shuzhen Wang485e2442017-08-02 12:21:08 -07008366 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008367 /* In batch mode, use empty metadata if this is not the last in batch
8368 */
8369 resultMetadata = allocate_camera_metadata(0, 0);
8370 return resultMetadata;
8371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008372
8373 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8374 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8375 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8376 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8377 }
8378
8379 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8380 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8381 &aecTrigger->trigger, 1);
8382 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8383 &aecTrigger->trigger_id, 1);
8384 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8385 aecTrigger->trigger);
8386 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8387 aecTrigger->trigger_id);
8388 }
8389
8390 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8391 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8392 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8393 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8394 }
8395
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008396 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8397 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8398 if (NAME_NOT_FOUND != val) {
8399 uint8_t fwkAfMode = (uint8_t)val;
8400 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8401 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8402 } else {
8403 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8404 val);
8405 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008406 }
8407
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008408 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8409 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8410 af_trigger->trigger);
8411 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8412 af_trigger->trigger_id);
8413
8414 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8415 mAfTrigger = *af_trigger;
8416 uint32_t fwk_AfState = (uint32_t) *afState;
8417
8418 // If this is the result for a new trigger, check if there is new early
8419 // af state. If there is, use the last af state for all results
8420 // preceding current partial frame number.
8421 for (auto & pendingRequest : mPendingRequestsList) {
8422 if (pendingRequest.frame_number < frame_number) {
8423 pendingRequest.focusStateValid = true;
8424 pendingRequest.focusState = fwk_AfState;
8425 } else if (pendingRequest.frame_number == frame_number) {
8426 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8427 // Check if early AF state for trigger exists. If yes, send AF state as
8428 // partial result for better latency.
8429 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8430 pendingRequest.focusStateSent = true;
8431 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8432 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8433 frame_number, fwkEarlyAfState);
8434 }
8435 }
8436 }
8437 }
8438 }
8439 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8440 &mAfTrigger.trigger, 1);
8441 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8442
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008443 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8444 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008445 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008446 int32_t afRegions[REGIONS_TUPLE_COUNT];
8447 // Adjust the AF region from the sensor output coordinate system to the
8448 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008449 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8450 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008451
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008452 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008453 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8454 REGIONS_TUPLE_COUNT);
8455 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8456 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008457 hAfRect.left, hAfRect.top, hAfRect.width,
8458 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008459 }
8460
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008461 // AF region confidence
8462 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8463 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8464 }
8465
Thierry Strudel3d639192016-09-09 11:52:26 -07008466 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8467 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8468 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8469 if (NAME_NOT_FOUND != val) {
8470 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8471 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8472 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8473 } else {
8474 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8475 }
8476 }
8477
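    // Derive ANDROID_CONTROL_AE_MODE from the backend's redeye, LED flash mode and AE
    // mode values: redeye reduction wins, then auto/on flash, then plain AE
    // on/off/external-flash.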
8478 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8479 uint32_t aeMode = CAM_AE_MODE_MAX;
8480 int32_t flashMode = CAM_FLASH_MODE_MAX;
8481 int32_t redeye = -1;
8482 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8483 aeMode = *pAeMode;
8484 }
8485 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8486 flashMode = *pFlashMode;
8487 }
8488 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8489 redeye = *pRedeye;
8490 }
8491
8492 if (1 == redeye) {
8493 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8494 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8495 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8496 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8497 flashMode);
8498 if (NAME_NOT_FOUND != val) {
8499 fwk_aeMode = (uint8_t)val;
8500 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8501 } else {
8502 LOGE("Unsupported flash mode %d", flashMode);
8503 }
8504 } else if (aeMode == CAM_AE_MODE_ON) {
8505 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8506 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8507 } else if (aeMode == CAM_AE_MODE_OFF) {
8508 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8509 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008510 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8511 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8512 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008513 } else {
8514 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8515 "flashMode:%d, aeMode:%u!!!",
8516 redeye, flashMode, aeMode);
8517 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008518 if (mInstantAEC) {
8519 // Increment the frame index count until a bound is reached for instant AEC.
8520 mInstantAecFrameIdxCount++;
8521 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8522 CAM_INTF_META_AEC_INFO, metadata) {
8523 LOGH("ae_params->settled = %d",ae_params->settled);
8524 // If AEC has settled, or the number of frames has reached the bound,
8525 // reset instant AEC.
8526 if (ae_params->settled ||
8527 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8528 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8529 mInstantAEC = false;
8530 mResetInstantAEC = true;
8531 mInstantAecFrameIdxCount = 0;
8532 }
8533 }
8534 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008535
8536 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8537 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8538 IF_META_AVAILABLE(int32_t, af_tof_distance,
8539 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8540 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8541 int32_t fwk_af_tof_distance = *af_tof_distance;
8542 if (fwk_af_tof_confidence == 1) {
8543 mSceneDistance = fwk_af_tof_distance;
8544 } else {
8545 mSceneDistance = -1;
8546 }
8547 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8548 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8549 }
8550 }
8551 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8552
Thierry Strudel3d639192016-09-09 11:52:26 -07008553 resultMetadata = camMetadata.release();
8554 return resultMetadata;
8555}
8556
8557/*===========================================================================
8558 * FUNCTION : dumpMetadataToFile
8559 *
8560 * DESCRIPTION: Dumps tuning metadata to file system
8561 *
8562 * PARAMETERS :
8563 * @meta : tuning metadata
8564 * @dumpFrameCount : current dump frame count
8565 * @enabled : Enable mask
 * @type : dump type string used in the output file name
 * @frameNumber : frame number used in the output file name
8566 *
8567 *==========================================================================*/
8568void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8569 uint32_t &dumpFrameCount,
8570 bool enabled,
8571 const char *type,
8572 uint32_t frameNumber)
8573{
8574 //Some sanity checks
8575 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8576 LOGE("Tuning sensor data size bigger than expected %d: %d",
8577 meta.tuning_sensor_data_size,
8578 TUNING_SENSOR_DATA_MAX);
8579 return;
8580 }
8581
8582 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8583 LOGE("Tuning VFE data size bigger than expected %d: %d",
8584 meta.tuning_vfe_data_size,
8585 TUNING_VFE_DATA_MAX);
8586 return;
8587 }
8588
8589 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8590 LOGE("Tuning CPP data size bigger than expected %d: %d",
8591 meta.tuning_cpp_data_size,
8592 TUNING_CPP_DATA_MAX);
8593 return;
8594 }
8595
8596 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8597 LOGE("Tuning CAC data size bigger than expected %d: %d",
8598 meta.tuning_cac_data_size,
8599 TUNING_CAC_DATA_MAX);
8600 return;
8601 }
8602 //
8603
8604 if(enabled){
8605 char timeBuf[FILENAME_MAX];
8606 char buf[FILENAME_MAX];
8607 memset(buf, 0, sizeof(buf));
8608 memset(timeBuf, 0, sizeof(timeBuf));
8609 time_t current_time;
8610 struct tm * timeinfo;
8611 time (&current_time);
8612 timeinfo = localtime (&current_time);
8613 if (timeinfo != NULL) {
8614 strftime (timeBuf, sizeof(timeBuf),
8615 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8616 }
8617 String8 filePath(timeBuf);
8618 snprintf(buf,
8619 sizeof(buf),
8620 "%dm_%s_%d.bin",
8621 dumpFrameCount,
8622 type,
8623 frameNumber);
8624 filePath.append(buf);
8625 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8626 if (file_fd >= 0) {
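            // The dump file layout mirrors the QCAMERA3_TUNING_META_DATA_BLOB vendor
            // tag: six uint32 size headers followed by the sensor/VFE/CPP/CAC payloads.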
8627 ssize_t written_len = 0;
8628 meta.tuning_data_version = TUNING_DATA_VERSION;
8629 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8630 written_len += write(file_fd, data, sizeof(uint32_t));
8631 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8632 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8633 written_len += write(file_fd, data, sizeof(uint32_t));
8634 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8635 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8636 written_len += write(file_fd, data, sizeof(uint32_t));
8637 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8638 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8639 written_len += write(file_fd, data, sizeof(uint32_t));
8640 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8641 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8642 written_len += write(file_fd, data, sizeof(uint32_t));
8643 meta.tuning_mod3_data_size = 0;
8644 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8645 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8646 written_len += write(file_fd, data, sizeof(uint32_t));
8647 size_t total_size = meta.tuning_sensor_data_size;
8648 data = (void *)((uint8_t *)&meta.data);
8649 written_len += write(file_fd, data, total_size);
8650 total_size = meta.tuning_vfe_data_size;
8651 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8652 written_len += write(file_fd, data, total_size);
8653 total_size = meta.tuning_cpp_data_size;
8654 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8655 written_len += write(file_fd, data, total_size);
8656 total_size = meta.tuning_cac_data_size;
8657 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8658 written_len += write(file_fd, data, total_size);
8659 close(file_fd);
8660 }else {
8661 LOGE("fail to open file for metadata dumping");
8662 }
8663 }
8664}
8665
8666/*===========================================================================
8667 * FUNCTION : cleanAndSortStreamInfo
8668 *
8669 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8670 * and sort them such that raw streams are at the end of the list.
8671 * This is a workaround for a camera daemon constraint.
8672 *
8673 * PARAMETERS : None
8674 *
8675 *==========================================================================*/
8676void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8677{
8678 List<stream_info_t *> newStreamInfo;
8679
8680 /*clean up invalid streams*/
8681 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8682 it != mStreamInfo.end();) {
8683 if(((*it)->status) == INVALID){
8684 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8685 delete channel;
8686 free(*it);
8687 it = mStreamInfo.erase(it);
8688 } else {
8689 it++;
8690 }
8691 }
8692
8693 // Move preview/video/callback/snapshot streams into newList
8694 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8695 it != mStreamInfo.end();) {
8696 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8697 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8698 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8699 newStreamInfo.push_back(*it);
8700 it = mStreamInfo.erase(it);
8701 } else
8702 it++;
8703 }
8704 // Move raw streams into newList
8705 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8706 it != mStreamInfo.end();) {
8707 newStreamInfo.push_back(*it);
8708 it = mStreamInfo.erase(it);
8709 }
8710
8711 mStreamInfo = newStreamInfo;
Chien-Yu Chen3d836272017-09-20 11:10:21 -07008712
8713 // Make sure that stream IDs are unique.
8714 uint32_t id = 0;
8715 for (auto streamInfo : mStreamInfo) {
8716 streamInfo->id = id++;
8717 }
8718
Thierry Strudel3d639192016-09-09 11:52:26 -07008719}
8720
8721/*===========================================================================
8722 * FUNCTION : extractJpegMetadata
8723 *
8724 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8725 * JPEG metadata is cached in HAL, and return as part of capture
8726 * result when metadata is returned from camera daemon.
8727 *
8728 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8729 * @request: capture request
8730 *
8731 *==========================================================================*/
8732void QCamera3HardwareInterface::extractJpegMetadata(
8733 CameraMetadata& jpegMetadata,
8734 const camera3_capture_request_t *request)
8735{
8736 CameraMetadata frame_settings;
8737 frame_settings = request->settings;
8738
8739 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8740 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8741 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8742 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8743
8744 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8745 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8746 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8747 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8748
8749 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8750 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8751 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8752 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8753
8754 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8755 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8756 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8757 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8758
8759 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8760 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8761 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8762 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8763
8764 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8765 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8766 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8767 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8768
8769 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8770 int32_t thumbnail_size[2];
8771 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8772 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8773 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8774 int32_t orientation =
8775 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008776 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008777 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8778 int32_t temp;
8779 temp = thumbnail_size[0];
8780 thumbnail_size[0] = thumbnail_size[1];
8781 thumbnail_size[1] = temp;
8782 }
8783 }
8784 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8785 thumbnail_size,
8786 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8787 }
8788
8789}
8790
8791/*===========================================================================
8792 * FUNCTION : convertToRegions
8793 *
8794 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8795 *
8796 * PARAMETERS :
8797 * @rect : cam_rect_t struct to convert
8798 * @region : int32_t destination array
8799 * @weight : if we are converting from cam_area_t, weight is valid
8800 * else weight = -1
8801 *
8802 *==========================================================================*/
8803void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8804 int32_t *region, int weight)
8805{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008806 region[FACE_LEFT] = rect.left;
8807 region[FACE_TOP] = rect.top;
8808 region[FACE_RIGHT] = rect.left + rect.width;
8809 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008810 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008811 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008812 }
8813}
8814
8815/*===========================================================================
8816 * FUNCTION : convertFromRegions
8817 *
8818 * DESCRIPTION: helper method to convert from array to cam_rect_t
8819 *
8820 * PARAMETERS :
8821 * @rect : cam_rect_t struct to convert
8822 * @region : int32_t destination array
8823 * @weight : if we are converting from cam_area_t, weight is valid
8824 * else weight = -1
8825 *
8826 *==========================================================================*/
8827void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008828 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008829{
Thierry Strudel3d639192016-09-09 11:52:26 -07008830 int32_t x_min = frame_settings.find(tag).data.i32[0];
8831 int32_t y_min = frame_settings.find(tag).data.i32[1];
8832 int32_t x_max = frame_settings.find(tag).data.i32[2];
8833 int32_t y_max = frame_settings.find(tag).data.i32[3];
8834 roi.weight = frame_settings.find(tag).data.i32[4];
8835 roi.rect.left = x_min;
8836 roi.rect.top = y_min;
8837 roi.rect.width = x_max - x_min;
8838 roi.rect.height = y_max - y_min;
8839}
8840
8841/*===========================================================================
8842 * FUNCTION : resetIfNeededROI
8843 *
8844 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8845 * crop region
8846 *
8847 * PARAMETERS :
8848 * @roi : cam_area_t struct to resize
8849 * @scalerCropRegion : cam_crop_region_t region to compare against
8850 *
8851 *
8852 *==========================================================================*/
8853bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8854 const cam_crop_region_t* scalerCropRegion)
8855{
8856 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8857 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8858 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8859 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8860
8861 /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8862 * Without this check, the validation below (whether the ROI lies inside the
8863 * scaler crop region) would fail, so the ROI would not be reset and the
8864 * algorithm would keep using a stale ROI window.
8865 */
8866 if (roi->weight == 0) {
8867 return true;
8868 }
8869
8870 if ((roi_x_max < scalerCropRegion->left) ||
8871            // right edge of roi window is left of scaler crop's left edge
8872 (roi_y_max < scalerCropRegion->top) ||
8873            // bottom edge of roi window is above scaler crop's top edge
8874 (roi->rect.left > crop_x_max) ||
8875            // left edge of roi window is beyond (to the right of) scaler crop's right edge
8876 (roi->rect.top > crop_y_max)){
8877            // top edge of roi window is below scaler crop's bottom edge
8878 return false;
8879 }
8880 if (roi->rect.left < scalerCropRegion->left) {
8881 roi->rect.left = scalerCropRegion->left;
8882 }
8883 if (roi->rect.top < scalerCropRegion->top) {
8884 roi->rect.top = scalerCropRegion->top;
8885 }
8886 if (roi_x_max > crop_x_max) {
8887 roi_x_max = crop_x_max;
8888 }
8889 if (roi_y_max > crop_y_max) {
8890 roi_y_max = crop_y_max;
8891 }
8892 roi->rect.width = roi_x_max - roi->rect.left;
8893 roi->rect.height = roi_y_max - roi->rect.top;
8894 return true;
8895}
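/* Worked example (illustrative numbers): with scalerCropRegion =
 * {left=0, top=0, width=1920, height=1080} and roi->rect =
 * {left=1800, top=900, width=400, height=400}, the roi overlaps the crop, so
 * it is clamped to {1800, 900, 120, 180} and true is returned. A roi that
 * lies completely outside the crop (e.g. left=2000 > crop_x_max) returns false.
 */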
8896
8897/*===========================================================================
8898 * FUNCTION : convertLandmarks
8899 *
8900 * DESCRIPTION: helper method to extract the landmarks from face detection info
8901 *
8902 * PARAMETERS :
8903 * @landmark_data : input landmark data to be converted
8904 * @landmarks : int32_t destination array
8905 *
8906 *
8907 *==========================================================================*/
8908void QCamera3HardwareInterface::convertLandmarks(
8909 cam_face_landmarks_info_t landmark_data,
8910 int32_t *landmarks)
8911{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008912 if (landmark_data.is_left_eye_valid) {
8913 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8914 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8915 } else {
8916 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8917 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8918 }
8919
8920 if (landmark_data.is_right_eye_valid) {
8921 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8922 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8923 } else {
8924 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8925 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8926 }
8927
8928 if (landmark_data.is_mouth_valid) {
8929 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8930 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8931 } else {
8932 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8933 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8934 }
8935}
8936
8937/*===========================================================================
8938 * FUNCTION : setInvalidLandmarks
8939 *
8940 * DESCRIPTION: helper method to set invalid landmarks
8941 *
8942 * PARAMETERS :
8943 * @landmarks : int32_t destination array
8944 *
8945 *
8946 *==========================================================================*/
8947void QCamera3HardwareInterface::setInvalidLandmarks(
8948 int32_t *landmarks)
8949{
8950 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8951 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8952 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8953 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8954 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8955 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008956}
8957
8958#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008959
8960/*===========================================================================
8961 * FUNCTION : getCapabilities
8962 *
8963 * DESCRIPTION: query camera capability from back-end
8964 *
8965 * PARAMETERS :
8966 * @ops : mm-interface ops structure
8967 * @cam_handle : camera handle for which we need capability
8968 *
8969 * RETURN : ptr type of capability structure
8970 * capability for success
8971 * NULL for failure
8972 *==========================================================================*/
8973cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8974 uint32_t cam_handle)
8975{
8976 int rc = NO_ERROR;
8977 QCamera3HeapMemory *capabilityHeap = NULL;
8978 cam_capability_t *cap_ptr = NULL;
8979
8980 if (ops == NULL) {
8981 LOGE("Invalid arguments");
8982 return NULL;
8983 }
8984
8985 capabilityHeap = new QCamera3HeapMemory(1);
8986 if (capabilityHeap == NULL) {
8987 LOGE("creation of capabilityHeap failed");
8988 return NULL;
8989 }
8990
8991 /* Allocate memory for capability buffer */
8992 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8993 if(rc != OK) {
8994        LOGE("No memory for capability");
8995 goto allocate_failed;
8996 }
8997
8998 /* Map memory for capability buffer */
8999 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
9000
9001 rc = ops->map_buf(cam_handle,
9002 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
9003 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
9004 if(rc < 0) {
9005 LOGE("failed to map capability buffer");
9006 rc = FAILED_TRANSACTION;
9007 goto map_failed;
9008 }
9009
9010 /* Query Capability */
9011 rc = ops->query_capability(cam_handle);
9012 if(rc < 0) {
9013 LOGE("failed to query capability");
9014 rc = FAILED_TRANSACTION;
9015 goto query_failed;
9016 }
9017
9018 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
9019 if (cap_ptr == NULL) {
9020 LOGE("out of memory");
9021 rc = NO_MEMORY;
9022 goto query_failed;
9023 }
9024
9025 memset(cap_ptr, 0, sizeof(cam_capability_t));
9026 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
9027
9028 int index;
9029 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
9030 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
9031 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
9032 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
9033 }
9034
9035query_failed:
9036 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9037map_failed:
9038 capabilityHeap->deallocate();
9039allocate_failed:
9040 delete capabilityHeap;
9041
9042 if (rc != NO_ERROR) {
9043 return NULL;
9044 } else {
9045 return cap_ptr;
9046 }
9047}
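/* Usage sketch (illustrative; error handling trimmed): the returned structure
 * is allocated with malloc(), so the caller owns it and must free() it, as
 * initCapabilities() below does on its error path.
 *
 *   cam_capability_t *caps = getCapabilities(cameraHandle->ops, handle);
 *   if (caps != NULL) {
 *       // ... read capability fields ...
 *       free(caps);
 *   }
 */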
9048
Thierry Strudel3d639192016-09-09 11:52:26 -07009049/*===========================================================================
9050 * FUNCTION : initCapabilities
9051 *
9052 * DESCRIPTION: initialize camera capabilities in static data struct
9053 *
9054 * PARAMETERS :
9055 * @cameraId : camera Id
9056 *
9057 * RETURN : int32_t type of status
9058 * NO_ERROR -- success
9059 *              non-zero failure code
9060 *==========================================================================*/
9061int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9062{
9063 int rc = 0;
9064 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009065 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009066
9067 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9068 if (rc) {
9069 LOGE("camera_open failed. rc = %d", rc);
9070 goto open_failed;
9071 }
9072 if (!cameraHandle) {
9073 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9074 goto open_failed;
9075 }
9076
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009077 handle = get_main_camera_handle(cameraHandle->camera_handle);
9078 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9079 if (gCamCapability[cameraId] == NULL) {
9080 rc = FAILED_TRANSACTION;
9081 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009082 }
9083
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009084 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009085 if (is_dual_camera_by_idx(cameraId)) {
9086 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9087 gCamCapability[cameraId]->aux_cam_cap =
9088 getCapabilities(cameraHandle->ops, handle);
9089 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9090 rc = FAILED_TRANSACTION;
9091 free(gCamCapability[cameraId]);
9092 goto failed_op;
9093 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009094
9095 // Copy the main camera capability to main_cam_cap struct
9096 gCamCapability[cameraId]->main_cam_cap =
9097 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9098 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9099 LOGE("out of memory");
9100 rc = NO_MEMORY;
9101 goto failed_op;
9102 }
9103 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9104 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009105 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009106failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009107 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9108 cameraHandle = NULL;
9109open_failed:
9110 return rc;
9111}
9112
9113/*==========================================================================
9114 * FUNCTION   : get3AVersion
9115 *
9116 * DESCRIPTION: get the Q3A S/W version
9117 *
9118 * PARAMETERS :
9119 * @sw_version: Reference of Q3A structure which will hold version info upon
9120 * return
9121 *
9122 * RETURN : None
9123 *
9124 *==========================================================================*/
9125void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9126{
9127 if(gCamCapability[mCameraId])
9128 sw_version = gCamCapability[mCameraId]->q3a_version;
9129 else
9130 LOGE("Capability structure NULL!");
9131}
9132
9133
9134/*===========================================================================
9135 * FUNCTION : initParameters
9136 *
9137 * DESCRIPTION: initialize camera parameters
9138 *
9139 * PARAMETERS :
9140 *
9141 * RETURN : int32_t type of status
9142 * NO_ERROR -- success
9143 *              non-zero failure code
9144 *==========================================================================*/
9145int QCamera3HardwareInterface::initParameters()
9146{
9147 int rc = 0;
9148
9149 //Allocate Set Param Buffer
9150 mParamHeap = new QCamera3HeapMemory(1);
9151 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9152 if(rc != OK) {
9153 rc = NO_MEMORY;
9154 LOGE("Failed to allocate SETPARM Heap memory");
9155 delete mParamHeap;
9156 mParamHeap = NULL;
9157 return rc;
9158 }
9159
9160 //Map memory for parameters buffer
9161 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9162 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9163 mParamHeap->getFd(0),
9164 sizeof(metadata_buffer_t),
9165 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9166 if(rc < 0) {
9167 LOGE("failed to map SETPARM buffer");
9168 rc = FAILED_TRANSACTION;
9169 mParamHeap->deallocate();
9170 delete mParamHeap;
9171 mParamHeap = NULL;
9172 return rc;
9173 }
9174
9175 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9176
9177 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9178 return rc;
9179}
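/* Lifecycle sketch (illustrative): initParameters() is expected to be paired
 * with deinitParameters() below, which unmaps CAM_MAPPING_BUF_TYPE_PARM_BUF
 * and releases the mParamHeap and mPrevParameters allocated here.
 *
 *   if (initParameters() == NO_ERROR) {
 *       // ... fill mParameters and submit to the back-end ...
 *       deinitParameters();
 *   }
 */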
9180
9181/*===========================================================================
9182 * FUNCTION : deinitParameters
9183 *
9184 * DESCRIPTION: de-initialize camera parameters
9185 *
9186 * PARAMETERS :
9187 *
9188 * RETURN : NONE
9189 *==========================================================================*/
9190void QCamera3HardwareInterface::deinitParameters()
9191{
9192 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9193 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9194
9195 mParamHeap->deallocate();
9196 delete mParamHeap;
9197 mParamHeap = NULL;
9198
9199 mParameters = NULL;
9200
9201 free(mPrevParameters);
9202 mPrevParameters = NULL;
9203}
9204
9205/*===========================================================================
9206 * FUNCTION : calcMaxJpegSize
9207 *
9208 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9209 *
9210 * PARAMETERS :
9211 *
9212 * RETURN : max_jpeg_size
9213 *==========================================================================*/
9214size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9215{
9216 size_t max_jpeg_size = 0;
9217 size_t temp_width, temp_height;
9218 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9219 MAX_SIZES_CNT);
9220 for (size_t i = 0; i < count; i++) {
9221 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9222 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9223 if (temp_width * temp_height > max_jpeg_size ) {
9224 max_jpeg_size = temp_width * temp_height;
9225 }
9226 }
9227 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9228 return max_jpeg_size;
9229}
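/* Worked example (hypothetical sensor): for a largest picture size of
 * 4000x3000, max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t),
 * i.e. 18000000 bytes plus the blob header.
 */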
9230
9231/*===========================================================================
9232 * FUNCTION : getMaxRawSize
9233 *
9234 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9235 *
9236 * PARAMETERS :
9237 *
9238 * RETURN : Largest supported Raw Dimension
9239 *==========================================================================*/
9240cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9241{
9242 int max_width = 0;
9243 cam_dimension_t maxRawSize;
9244
9245 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9246 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9247 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9248 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9249 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9250 }
9251 }
9252 return maxRawSize;
9253}
9254
9255
9256/*===========================================================================
9257 * FUNCTION : calcMaxJpegDim
9258 *
9259 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9260 *
9261 * PARAMETERS :
9262 *
9263 * RETURN : max_jpeg_dim
9264 *==========================================================================*/
9265cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9266{
9267 cam_dimension_t max_jpeg_dim;
9268 cam_dimension_t curr_jpeg_dim;
9269 max_jpeg_dim.width = 0;
9270 max_jpeg_dim.height = 0;
9271 curr_jpeg_dim.width = 0;
9272 curr_jpeg_dim.height = 0;
9273 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9274 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9275 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9276 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9277 max_jpeg_dim.width * max_jpeg_dim.height ) {
9278 max_jpeg_dim.width = curr_jpeg_dim.width;
9279 max_jpeg_dim.height = curr_jpeg_dim.height;
9280 }
9281 }
9282 return max_jpeg_dim;
9283}
9284
9285/*===========================================================================
9286 * FUNCTION : addStreamConfig
9287 *
9288 * DESCRIPTION: adds the stream configuration to the array
9289 *
9290 * PARAMETERS :
9291 * @available_stream_configs : pointer to stream configuration array
9292 * @scalar_format : scalar format
9293 * @dim : configuration dimension
9294 * @config_type : input or output configuration type
9295 *
9296 * RETURN : NONE
9297 *==========================================================================*/
9298void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9299 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9300{
9301 available_stream_configs.add(scalar_format);
9302 available_stream_configs.add(dim.width);
9303 available_stream_configs.add(dim.height);
9304 available_stream_configs.add(config_type);
9305}
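/* Example (illustrative values): addStreamConfig(configs, HAL_PIXEL_FORMAT_BLOB,
 * {4000, 3000}, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) appends
 * the flat tuple (format, width, height, type) expected by
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS to the vector.
 */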
9306
9307/*===========================================================================
9308 * FUNCTION   : supportBurstCapture
9309 *
9310 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9311 *
9312 * PARAMETERS :
9313 * @cameraId : camera Id
9314 *
9315 * RETURN : true if camera supports BURST_CAPTURE
9316 * false otherwise
9317 *==========================================================================*/
9318bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9319{
9320 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9321 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9322 const int32_t highResWidth = 3264;
9323 const int32_t highResHeight = 2448;
9324
9325 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9326 // Maximum resolution images cannot be captured at >= 10fps
9327 // -> not supporting BURST_CAPTURE
9328 return false;
9329 }
9330
9331 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9332 // Maximum resolution images can be captured at >= 20fps
9333 // --> supporting BURST_CAPTURE
9334 return true;
9335 }
9336
9337 // Find the smallest highRes resolution, or largest resolution if there is none
9338 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9339 MAX_SIZES_CNT);
9340 size_t highRes = 0;
9341 while ((highRes + 1 < totalCnt) &&
9342 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9343 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9344 highResWidth * highResHeight)) {
9345 highRes++;
9346 }
9347 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9348 return true;
9349 } else {
9350 return false;
9351 }
9352}
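/* Worked example (hypothetical durations): with picture_min_duration[0] =
 * 40000000 ns (25 fps) at maximum resolution, the 100 ms bound does not reject
 * the camera and the 50 ms bound accepts it, so BURST_CAPTURE is reported.
 * With 80000000 ns (12.5 fps), the decision falls through to the smallest
 * resolution at or above 3264x2448 in the picture size table.
 */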
9353
9354/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009355 * FUNCTION : getPDStatIndex
9356 *
9357 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9358 *
9359 * PARAMETERS :
9360 * @caps : camera capabilities
9361 *
9362 * RETURN : int32_t type
9363 * non-negative - on success
9364 * -1 - on failure
9365 *==========================================================================*/
9366int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9367 if (nullptr == caps) {
9368 return -1;
9369 }
9370
9371 uint32_t metaRawCount = caps->meta_raw_channel_count;
9372 int32_t ret = -1;
9373 for (size_t i = 0; i < metaRawCount; i++) {
9374 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9375 ret = i;
9376 break;
9377 }
9378 }
9379
9380 return ret;
9381}
9382
9383/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009384 * FUNCTION : initStaticMetadata
9385 *
9386 * DESCRIPTION: initialize the static metadata
9387 *
9388 * PARAMETERS :
9389 * @cameraId : camera Id
9390 *
9391 * RETURN : int32_t type of status
9392 * 0 -- success
9393 * non-zero failure code
9394 *==========================================================================*/
9395int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9396{
9397 int rc = 0;
9398 CameraMetadata staticInfo;
9399 size_t count = 0;
9400 bool limitedDevice = false;
9401 char prop[PROPERTY_VALUE_MAX];
9402 bool supportBurst = false;
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009403 Vector<int32_t> available_characteristics_keys;
Thierry Strudel3d639192016-09-09 11:52:26 -07009404
9405 supportBurst = supportBurstCapture(cameraId);
9406
9407    /* If the sensor is a YUV or mono sensor (no raw support), if per-frame
9408     * control is not guaranteed, or if burst capture is not supported, the
9409     * device is advertised as a LIMITED device */
9410 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9411 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9412 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9413 !supportBurst;
9414
9415 uint8_t supportedHwLvl = limitedDevice ?
9416 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009417#ifndef USE_HAL_3_3
9418 // LEVEL_3 - This device will support level 3.
9419 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9420#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009421 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009422#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009423
9424 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9425 &supportedHwLvl, 1);
9426
9427 bool facingBack = false;
9428 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9429 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9430 facingBack = true;
9431 }
9432 /*HAL 3 only*/
9433 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9434 &gCamCapability[cameraId]->min_focus_distance, 1);
9435
9436 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9437 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9438
9439    /* Should be using the list of focal lengths, but the sensor doesn't provide that info yet */
9440 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9441 &gCamCapability[cameraId]->focal_length,
9442 1);
9443
9444 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9445 gCamCapability[cameraId]->apertures,
9446 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9447
9448 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9449 gCamCapability[cameraId]->filter_densities,
9450 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9451
9452
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009453 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9454 size_t mode_count =
9455 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9456 for (size_t i = 0; i < mode_count; i++) {
9457 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9458 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009459 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009460 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009461
9462 int32_t lens_shading_map_size[] = {
9463 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9464 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9465 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9466 lens_shading_map_size,
9467 sizeof(lens_shading_map_size)/sizeof(int32_t));
9468
9469 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9470 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9471
9472 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9473 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9474
9475 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9476 &gCamCapability[cameraId]->max_frame_duration, 1);
9477
9478 camera_metadata_rational baseGainFactor = {
9479 gCamCapability[cameraId]->base_gain_factor.numerator,
9480 gCamCapability[cameraId]->base_gain_factor.denominator};
9481 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9482 &baseGainFactor, 1);
9483
9484 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9485 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9486
9487 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9488 gCamCapability[cameraId]->pixel_array_size.height};
9489 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9490 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9491
9492 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9493 gCamCapability[cameraId]->active_array_size.top,
9494 gCamCapability[cameraId]->active_array_size.width,
9495 gCamCapability[cameraId]->active_array_size.height};
9496 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9497 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9498
9499 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9500 &gCamCapability[cameraId]->white_level, 1);
9501
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009502 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9503 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9504 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009505 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009506 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009507
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009508#ifndef USE_HAL_3_3
9509 bool hasBlackRegions = false;
9510 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9511 LOGW("black_region_count: %d is bounded to %d",
9512 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9513 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9514 }
9515 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9516 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9517 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9518 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9519 }
9520 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9521 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9522 hasBlackRegions = true;
9523 }
9524#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009525 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9526 &gCamCapability[cameraId]->flash_charge_duration, 1);
9527
9528 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9529 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9530
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009531 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9532 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9533 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009534 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9535 &timestampSource, 1);
9536
Thierry Strudel54dc9782017-02-15 12:12:10 -08009537 //update histogram vendor data
9538 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009539 &gCamCapability[cameraId]->histogram_size, 1);
9540
Thierry Strudel54dc9782017-02-15 12:12:10 -08009541 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009542 &gCamCapability[cameraId]->max_histogram_count, 1);
9543
Shuzhen Wang14415f52016-11-16 18:26:18 -08009544 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9545    //so that the app can request fewer bins than the maximum supported.
9546 std::vector<int32_t> histBins;
9547 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9548 histBins.push_back(maxHistBins);
9549 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9550 (maxHistBins & 0x1) == 0) {
9551 histBins.push_back(maxHistBins >> 1);
9552 maxHistBins >>= 1;
9553 }
9554 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9555 histBins.data(), histBins.size());
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009556 if (!histBins.empty()) {
9557 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9558 }
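    // Example (hypothetical values): with max_histogram_count = 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE = 32, histBins above becomes {256, 128, 64, 32}.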
Shuzhen Wang14415f52016-11-16 18:26:18 -08009559
Thierry Strudel3d639192016-09-09 11:52:26 -07009560 int32_t sharpness_map_size[] = {
9561 gCamCapability[cameraId]->sharpness_map_size.width,
9562 gCamCapability[cameraId]->sharpness_map_size.height};
9563
9564 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9565 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9566
9567 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9568 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9569
Emilian Peev0f3c3162017-03-15 12:57:46 +00009570 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9571 if (0 <= indexPD) {
9572 // Advertise PD stats data as part of the Depth capabilities
9573 int32_t depthWidth =
9574 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9575 int32_t depthHeight =
9576 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009577 int32_t depthStride =
9578 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009579 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9580 assert(0 < depthSamplesCount);
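        // Example (hypothetical PD stats plane): 576x480 yields
        // (576 * 480 * 2) / 16 = 34560 samples.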
9581 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9582 &depthSamplesCount, 1);
9583
9584 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9585 depthHeight,
9586 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9587 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9588 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9589 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9590 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9591
9592 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9593 depthHeight, 33333333,
9594 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9595 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9596 depthMinDuration,
9597 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9598
9599 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9600 depthHeight, 0,
9601 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9602 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9603 depthStallDuration,
9604 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9605
9606 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9607 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009608
9609 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9610 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9611 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009612 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);
Emilian Peev835938b2017-08-31 16:59:54 +01009613
9614 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9615 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9616 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009617 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);
Emilian Peev835938b2017-08-31 16:59:54 +01009618
9619 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9620 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9621 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009622 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);
Emilian Peev835938b2017-08-31 16:59:54 +01009623
9624 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9625 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9626 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peeve91e9ae2017-09-18 14:40:55 +01009627 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
9628
Emilian Peev0f3c3162017-03-15 12:57:46 +00009629 }
9630
Thierry Strudel3d639192016-09-09 11:52:26 -07009631 int32_t scalar_formats[] = {
9632 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9633 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9634 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9635 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9636 HAL_PIXEL_FORMAT_RAW10,
9637 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009638 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9639 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9640 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009641
9642 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9643 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9644 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9645 count, MAX_SIZES_CNT, available_processed_sizes);
9646 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9647 available_processed_sizes, count * 2);
9648
9649 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9650 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9651 makeTable(gCamCapability[cameraId]->raw_dim,
9652 count, MAX_SIZES_CNT, available_raw_sizes);
9653 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9654 available_raw_sizes, count * 2);
9655
9656 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9657 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9658 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9659 count, MAX_SIZES_CNT, available_fps_ranges);
9660 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9661 available_fps_ranges, count * 2);
9662
9663 camera_metadata_rational exposureCompensationStep = {
9664 gCamCapability[cameraId]->exp_compensation_step.numerator,
9665 gCamCapability[cameraId]->exp_compensation_step.denominator};
9666 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9667 &exposureCompensationStep, 1);
9668
9669 Vector<uint8_t> availableVstabModes;
9670 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9671 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009672 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009673 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009674 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009675 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009676 count = IS_TYPE_MAX;
9677 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9678 for (size_t i = 0; i < count; i++) {
9679 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9680 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9681 eisSupported = true;
9682 break;
9683 }
9684 }
9685 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009686 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9687 }
9688 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9689 availableVstabModes.array(), availableVstabModes.size());
9690
9691 /*HAL 1 and HAL 3 common*/
9692 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9693 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9694 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009695 // Cap the max zoom to the max preferred value
9696 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009697 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9698 &maxZoom, 1);
9699
9700 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9701 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9702
9703 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9704 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9705 max3aRegions[2] = 0; /* AF not supported */
9706 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9707 max3aRegions, 3);
9708
9709 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9710 memset(prop, 0, sizeof(prop));
9711 property_get("persist.camera.facedetect", prop, "1");
9712 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9713 LOGD("Support face detection mode: %d",
9714 supportedFaceDetectMode);
9715
9716 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009717    /* supported mode should be OFF if the max number of faces is 0 */
9718 if (maxFaces <= 0) {
9719 supportedFaceDetectMode = 0;
9720 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009721 Vector<uint8_t> availableFaceDetectModes;
9722 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9723 if (supportedFaceDetectMode == 1) {
9724 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9725 } else if (supportedFaceDetectMode == 2) {
9726 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9727 } else if (supportedFaceDetectMode == 3) {
9728 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9729 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9730 } else {
9731 maxFaces = 0;
9732 }
9733 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9734 availableFaceDetectModes.array(),
9735 availableFaceDetectModes.size());
9736 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9737 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009738 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9739 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9740 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009741
9742 int32_t exposureCompensationRange[] = {
9743 gCamCapability[cameraId]->exposure_compensation_min,
9744 gCamCapability[cameraId]->exposure_compensation_max};
9745 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9746 exposureCompensationRange,
9747 sizeof(exposureCompensationRange)/sizeof(int32_t));
9748
9749 uint8_t lensFacing = (facingBack) ?
9750 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9751 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9752
9753 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9754 available_thumbnail_sizes,
9755 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9756
9757 /*all sizes will be clubbed into this tag*/
9758 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9759 /*android.scaler.availableStreamConfigurations*/
9760 Vector<int32_t> available_stream_configs;
9761 cam_dimension_t active_array_dim;
9762 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9763 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009764
9765    /* Advertise the list of supported input dimensions based on the property below.
9766      By default all sizes up to 5 MP will be advertised.
9767 Note that the setprop resolution format should be WxH.
9768 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9769 To list all supported sizes, setprop needs to be set with "0x0" */
9770 cam_dimension_t minInputSize = {2592,1944}; //5MP
9771 memset(prop, 0, sizeof(prop));
9772 property_get("persist.camera.input.minsize", prop, "2592x1944");
9773 if (strlen(prop) > 0) {
9774 char *saveptr = NULL;
9775 char *token = strtok_r(prop, "x", &saveptr);
9776 if (token != NULL) {
9777 minInputSize.width = atoi(token);
9778 }
9779 token = strtok_r(NULL, "x", &saveptr);
9780 if (token != NULL) {
9781 minInputSize.height = atoi(token);
9782 }
9783 }
9784
Thierry Strudel3d639192016-09-09 11:52:26 -07009785    /* Add input/output stream configurations for each scalar format */
9786 for (size_t j = 0; j < scalar_formats_count; j++) {
9787 switch (scalar_formats[j]) {
9788 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9789 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9790 case HAL_PIXEL_FORMAT_RAW10:
9791 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9792 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9793 addStreamConfig(available_stream_configs, scalar_formats[j],
9794 gCamCapability[cameraId]->raw_dim[i],
9795 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9796 }
9797 break;
9798 case HAL_PIXEL_FORMAT_BLOB:
9799 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9800 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9801 addStreamConfig(available_stream_configs, scalar_formats[j],
9802 gCamCapability[cameraId]->picture_sizes_tbl[i],
9803 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9804 }
9805 break;
9806 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9807 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9808 default:
9809 cam_dimension_t largest_picture_size;
9810 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9811 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9812 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9813 addStreamConfig(available_stream_configs, scalar_formats[j],
9814 gCamCapability[cameraId]->picture_sizes_tbl[i],
9815 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009816                /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009817 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9818 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009819 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9820 >= minInputSize.width) || (gCamCapability[cameraId]->
9821 picture_sizes_tbl[i].height >= minInputSize.height)) {
9822 addStreamConfig(available_stream_configs, scalar_formats[j],
9823 gCamCapability[cameraId]->picture_sizes_tbl[i],
9824 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9825 }
9826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009827 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009828
Thierry Strudel3d639192016-09-09 11:52:26 -07009829 break;
9830 }
9831 }
9832
9833 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9834 available_stream_configs.array(), available_stream_configs.size());
9835 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9836 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9837
9838 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9839 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9840
9841 /* android.scaler.availableMinFrameDurations */
9842 Vector<int64_t> available_min_durations;
9843 for (size_t j = 0; j < scalar_formats_count; j++) {
9844 switch (scalar_formats[j]) {
9845 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9846 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9847 case HAL_PIXEL_FORMAT_RAW10:
9848 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9849 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9850 available_min_durations.add(scalar_formats[j]);
9851 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9852 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9853 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9854 }
9855 break;
9856 default:
9857 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9858 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9859 available_min_durations.add(scalar_formats[j]);
9860 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9861 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9862 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9863 }
9864 break;
9865 }
9866 }
9867 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9868 available_min_durations.array(), available_min_durations.size());
9869
9870 Vector<int32_t> available_hfr_configs;
9871 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9872 int32_t fps = 0;
9873 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9874 case CAM_HFR_MODE_60FPS:
9875 fps = 60;
9876 break;
9877 case CAM_HFR_MODE_90FPS:
9878 fps = 90;
9879 break;
9880 case CAM_HFR_MODE_120FPS:
9881 fps = 120;
9882 break;
9883 case CAM_HFR_MODE_150FPS:
9884 fps = 150;
9885 break;
9886 case CAM_HFR_MODE_180FPS:
9887 fps = 180;
9888 break;
9889 case CAM_HFR_MODE_210FPS:
9890 fps = 210;
9891 break;
9892 case CAM_HFR_MODE_240FPS:
9893 fps = 240;
9894 break;
9895 case CAM_HFR_MODE_480FPS:
9896 fps = 480;
9897 break;
9898 case CAM_HFR_MODE_OFF:
9899 case CAM_HFR_MODE_MAX:
9900 default:
9901 break;
9902 }
9903
9904 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9905 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9906 /* For each HFR frame rate, need to advertise one variable fps range
9907 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9908 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9909 * set by the app. When video recording is started, [120, 120] is
9910 * set. This way sensor configuration does not change when recording
9911 * is started */
9912
9913 /* (width, height, fps_min, fps_max, batch_size_max) */
9914 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9915 j < MAX_SIZES_CNT; j++) {
9916 available_hfr_configs.add(
9917 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9918 available_hfr_configs.add(
9919 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9920 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9921 available_hfr_configs.add(fps);
9922 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9923
9924 /* (width, height, fps_min, fps_max, batch_size_max) */
9925 available_hfr_configs.add(
9926 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9927 available_hfr_configs.add(
9928 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9929 available_hfr_configs.add(fps);
9930 available_hfr_configs.add(fps);
9931 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9932 }
9933 }
9934 }
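    // Example (illustrative): a 1920x1080 entry at CAM_HFR_MODE_120FPS adds
    // (1920, 1080, PREVIEW_FPS_FOR_HFR, 120, 120 / PREVIEW_FPS_FOR_HFR) and
    // (1920, 1080, 120, 120, 120 / PREVIEW_FPS_FOR_HFR), i.e. the [30, 120]
    // and [120, 120] ranges described above when PREVIEW_FPS_FOR_HFR is 30.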
9935 //Advertise HFR capability only if the property is set
9936 memset(prop, 0, sizeof(prop));
9937 property_get("persist.camera.hal3hfr.enable", prop, "1");
9938 uint8_t hfrEnable = (uint8_t)atoi(prop);
9939
9940 if(hfrEnable && available_hfr_configs.array()) {
9941 staticInfo.update(
9942 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9943 available_hfr_configs.array(), available_hfr_configs.size());
9944 }
9945
9946 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9947 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9948 &max_jpeg_size, 1);
9949
9950 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9951 size_t size = 0;
9952 count = CAM_EFFECT_MODE_MAX;
9953 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9954 for (size_t i = 0; i < count; i++) {
9955 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9956 gCamCapability[cameraId]->supported_effects[i]);
9957 if (NAME_NOT_FOUND != val) {
9958 avail_effects[size] = (uint8_t)val;
9959 size++;
9960 }
9961 }
9962 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9963 avail_effects,
9964 size);
9965
9966 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9967 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9968 size_t supported_scene_modes_cnt = 0;
9969 count = CAM_SCENE_MODE_MAX;
9970 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9971 for (size_t i = 0; i < count; i++) {
9972 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9973 CAM_SCENE_MODE_OFF) {
9974 int val = lookupFwkName(SCENE_MODES_MAP,
9975 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9976 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009977
Thierry Strudel3d639192016-09-09 11:52:26 -07009978 if (NAME_NOT_FOUND != val) {
9979 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9980 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9981 supported_scene_modes_cnt++;
9982 }
9983 }
9984 }
9985 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9986 avail_scene_modes,
9987 supported_scene_modes_cnt);
9988
9989 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9990 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9991 supported_scene_modes_cnt,
9992 CAM_SCENE_MODE_MAX,
9993 scene_mode_overrides,
9994 supported_indexes,
9995 cameraId);
9996
9997 if (supported_scene_modes_cnt == 0) {
9998 supported_scene_modes_cnt = 1;
9999 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
10000 }
10001
10002 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
10003 scene_mode_overrides, supported_scene_modes_cnt * 3);
10004
10005 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
10006 ANDROID_CONTROL_MODE_AUTO,
10007 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
10008 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
10009 available_control_modes,
10010 3);
10011
10012 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
10013 size = 0;
10014 count = CAM_ANTIBANDING_MODE_MAX;
10015 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
10016 for (size_t i = 0; i < count; i++) {
10017 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
10018 gCamCapability[cameraId]->supported_antibandings[i]);
10019 if (NAME_NOT_FOUND != val) {
10020 avail_antibanding_modes[size] = (uint8_t)val;
10021 size++;
10022 }
10023
10024 }
10025 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10026 avail_antibanding_modes,
10027 size);
10028
10029 uint8_t avail_abberation_modes[] = {
10030 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
10031 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
10032 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
10033 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
10034 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
10035 if (0 == count) {
10036        // If no aberration correction modes are available for a device, advertise only the OFF mode
10037 size = 1;
10038 } else {
10039        // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
10040        // So, advertise all 3 modes if at least one mode is supported, as per the
10041        // new M requirement.
10042 size = 3;
10043 }
10044 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10045 avail_abberation_modes,
10046 size);
10047
10048 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10049 size = 0;
10050 count = CAM_FOCUS_MODE_MAX;
10051 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10052 for (size_t i = 0; i < count; i++) {
10053 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10054 gCamCapability[cameraId]->supported_focus_modes[i]);
10055 if (NAME_NOT_FOUND != val) {
10056 avail_af_modes[size] = (uint8_t)val;
10057 size++;
10058 }
10059 }
10060 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10061 avail_af_modes,
10062 size);
10063
10064 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10065 size = 0;
10066 count = CAM_WB_MODE_MAX;
10067 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10068 for (size_t i = 0; i < count; i++) {
10069 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10070 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10071 gCamCapability[cameraId]->supported_white_balances[i]);
10072 if (NAME_NOT_FOUND != val) {
10073 avail_awb_modes[size] = (uint8_t)val;
10074 size++;
10075 }
10076 }
10077 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10078 avail_awb_modes,
10079 size);
10080
10081 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10082 count = CAM_FLASH_FIRING_LEVEL_MAX;
10083 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10084 count);
10085 for (size_t i = 0; i < count; i++) {
10086 available_flash_levels[i] =
10087 gCamCapability[cameraId]->supported_firing_levels[i];
10088 }
10089 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10090 available_flash_levels, count);
10091
10092 uint8_t flashAvailable;
10093 if (gCamCapability[cameraId]->flash_available)
10094 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10095 else
10096 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10097 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10098 &flashAvailable, 1);
10099
10100 Vector<uint8_t> avail_ae_modes;
10101 count = CAM_AE_MODE_MAX;
10102 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10103 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010104 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10105 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10106 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10107 }
10108 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010109 }
10110 if (flashAvailable) {
10111 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10112 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10113 }
10114 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10115 avail_ae_modes.array(),
10116 avail_ae_modes.size());
10117
10118 int32_t sensitivity_range[2];
10119 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10120 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10121 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10122 sensitivity_range,
10123 sizeof(sensitivity_range) / sizeof(int32_t));
10124
10125 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10126 &gCamCapability[cameraId]->max_analog_sensitivity,
10127 1);
10128
10129 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10130 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10131 &sensor_orientation,
10132 1);
10133
10134 int32_t max_output_streams[] = {
10135 MAX_STALLING_STREAMS,
10136 MAX_PROCESSED_STREAMS,
10137 MAX_RAW_STREAMS};
10138 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10139 max_output_streams,
10140 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10141
10142 uint8_t avail_leds = 0;
10143 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10144 &avail_leds, 0);
10145
10146 uint8_t focus_dist_calibrated;
10147 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10148 gCamCapability[cameraId]->focus_dist_calibrated);
10149 if (NAME_NOT_FOUND != val) {
10150 focus_dist_calibrated = (uint8_t)val;
10151 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10152 &focus_dist_calibrated, 1);
10153 }
10154
10155 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10156 size = 0;
10157 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10158 MAX_TEST_PATTERN_CNT);
10159 for (size_t i = 0; i < count; i++) {
10160 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10161 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10162 if (NAME_NOT_FOUND != testpatternMode) {
10163 avail_testpattern_modes[size] = testpatternMode;
10164 size++;
10165 }
10166 }
10167 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10168 avail_testpattern_modes,
10169 size);
10170
10171 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10172 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10173 &max_pipeline_depth,
10174 1);
10175
10176 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10177 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10178 &partial_result_count,
10179 1);
10180
10181 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10182 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10183
10184 Vector<uint8_t> available_capabilities;
10185 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10186 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10187 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10188 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10189 if (supportBurst) {
10190 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10191 }
10192 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10193 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10194 if (hfrEnable && available_hfr_configs.array()) {
10195 available_capabilities.add(
10196 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10197 }
10198
10199 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10200 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10201 }
10202 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10203 available_capabilities.array(),
10204 available_capabilities.size());
10205
10206    // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10207    // The assumption is that all Bayer cameras support MANUAL_SENSOR.
10208 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10209 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10210
10211 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10212 &aeLockAvailable, 1);
10213
10214    // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10215    // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10216 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10217 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10218
10219 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10220 &awbLockAvailable, 1);
10221
10222 int32_t max_input_streams = 1;
10223 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10224 &max_input_streams,
10225 1);
10226
10227    /* Format of the map: inputFormat, numOutputFormats, outputFormat1, ..., outputFormatN */
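    // Decoded, the map below advertises: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888}
    // and YCbCr_420_888 -> {BLOB, YCbCr_420_888}.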
10228 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10229 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10230 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10231 HAL_PIXEL_FORMAT_YCbCr_420_888};
10232 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10233 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10234
10235 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10236 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10237 &max_latency,
10238 1);
10239
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010240#ifndef USE_HAL_3_3
10241 int32_t isp_sensitivity_range[2];
10242 isp_sensitivity_range[0] =
10243 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10244 isp_sensitivity_range[1] =
10245 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10246 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10247 isp_sensitivity_range,
10248 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10249#endif
10250
Thierry Strudel3d639192016-09-09 11:52:26 -070010251 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10252 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10253 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10254 available_hot_pixel_modes,
10255 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10256
10257 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10258 ANDROID_SHADING_MODE_FAST,
10259 ANDROID_SHADING_MODE_HIGH_QUALITY};
10260 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10261 available_shading_modes,
10262 3);
10263
10264 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10265 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10266 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10267 available_lens_shading_map_modes,
10268 2);
10269
10270 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10271 ANDROID_EDGE_MODE_FAST,
10272 ANDROID_EDGE_MODE_HIGH_QUALITY,
10273 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10274 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10275 available_edge_modes,
10276 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10277
10278 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10279 ANDROID_NOISE_REDUCTION_MODE_FAST,
10280 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10281 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10282 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10283 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10284 available_noise_red_modes,
10285 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10286
10287 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10288 ANDROID_TONEMAP_MODE_FAST,
10289 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10290 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10291 available_tonemap_modes,
10292 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10293
10294 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10295 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10296 available_hot_pixel_map_modes,
10297 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10298
10299 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10300 gCamCapability[cameraId]->reference_illuminant1);
10301 if (NAME_NOT_FOUND != val) {
10302 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10303 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10304 }
10305
10306 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10307 gCamCapability[cameraId]->reference_illuminant2);
10308 if (NAME_NOT_FOUND != val) {
10309 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10310 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10311 }
10312
10313 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10314 (void *)gCamCapability[cameraId]->forward_matrix1,
10315 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10316
10317 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10318 (void *)gCamCapability[cameraId]->forward_matrix2,
10319 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10320
10321 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10322 (void *)gCamCapability[cameraId]->color_transform1,
10323 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10324
10325 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10326 (void *)gCamCapability[cameraId]->color_transform2,
10327 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10328
10329 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10330 (void *)gCamCapability[cameraId]->calibration_transform1,
10331 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10332
10333 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10334 (void *)gCamCapability[cameraId]->calibration_transform2,
10335 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10336
10337 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10338 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10339 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10340 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10341 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10342 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10343 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10344 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10345 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10346 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10347 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10348 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10349 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10350 ANDROID_JPEG_GPS_COORDINATES,
10351 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10352 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10353 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10354 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10355 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10356 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10357 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10358 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10359 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10360 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010361#ifndef USE_HAL_3_3
10362 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10363#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010364 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010365 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010366 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10367 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010368 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010369 QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
10370 QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
10371 QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
10372 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
10373 QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
10374 QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
10375 QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
10376 QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
10377 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10378 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
10379 QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
10380 QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
10381 QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
10382 QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
10383 QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
10384 QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
10385 QCAMERA3_BINNING_CORRECTION_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010386 /* DevCamDebug metadata request_keys_basic */
10387 DEVCAMDEBUG_META_ENABLE,
10388 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010389 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010390 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010391 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010392 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010393 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010394 NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE
Samuel Ha68ba5172016-12-15 18:41:12 -080010395 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010396
10397 size_t request_keys_cnt =
10398 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10399 Vector<int32_t> available_request_keys;
10400 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10401 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10402 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10403 }
10404
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010405 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010406 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010407 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010408 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010409 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010410 }
10411
Thierry Strudel3d639192016-09-09 11:52:26 -070010412 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10413 available_request_keys.array(), available_request_keys.size());
10414
10415 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10416 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10417 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10418 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10419 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10420 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10421 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10422 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10423 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10424 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10425 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10426 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10427 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10428 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10429 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10430 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10431 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010432 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010433 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10434 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10435 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010436 ANDROID_STATISTICS_FACE_SCORES,
10437#ifndef USE_HAL_3_3
10438 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10439#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010440 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010441 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010442 QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
10443 QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
10444 QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
10445 QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
10446 QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
10447 QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
10448 QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
10449 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10450 QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
10451 QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
10452 QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
10453 QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
10454 QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
10455 QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
10456 QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
10457 QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
10458 QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
10459 QCAMERA3_STATS_GAZE_DEGREE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010460 // DevCamDebug metadata result_keys_basic
10461 DEVCAMDEBUG_META_ENABLE,
10462 // DevCamDebug metadata result_keys AF
10463 DEVCAMDEBUG_AF_LENS_POSITION,
10464 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10465 DEVCAMDEBUG_AF_TOF_DISTANCE,
10466 DEVCAMDEBUG_AF_LUMA,
10467 DEVCAMDEBUG_AF_HAF_STATE,
10468 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10469 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10470 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10471 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10472 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10473 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10474 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10475 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10476 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10477 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10478 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10479 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10480 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10481 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10482 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10483 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10484 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10485 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10486 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10487 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10488 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10489 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10490 // DevCamDebug metadata result_keys AEC
10491 DEVCAMDEBUG_AEC_TARGET_LUMA,
10492 DEVCAMDEBUG_AEC_COMP_LUMA,
10493 DEVCAMDEBUG_AEC_AVG_LUMA,
10494 DEVCAMDEBUG_AEC_CUR_LUMA,
10495 DEVCAMDEBUG_AEC_LINECOUNT,
10496 DEVCAMDEBUG_AEC_REAL_GAIN,
10497 DEVCAMDEBUG_AEC_EXP_INDEX,
10498 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010499 // DevCamDebug metadata result_keys zzHDR
10500 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10501 DEVCAMDEBUG_AEC_L_LINECOUNT,
10502 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10503 DEVCAMDEBUG_AEC_S_LINECOUNT,
10504 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10505 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10506 // DevCamDebug metadata result_keys ADRC
10507 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10508 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10509 DEVCAMDEBUG_AEC_GTM_RATIO,
10510 DEVCAMDEBUG_AEC_LTM_RATIO,
10511 DEVCAMDEBUG_AEC_LA_RATIO,
10512 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010513 // DevCamDebug metadata result_keys AEC MOTION
10514 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10515 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10516 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010517 // DevCamDebug metadata result_keys AWB
10518 DEVCAMDEBUG_AWB_R_GAIN,
10519 DEVCAMDEBUG_AWB_G_GAIN,
10520 DEVCAMDEBUG_AWB_B_GAIN,
10521 DEVCAMDEBUG_AWB_CCT,
10522 DEVCAMDEBUG_AWB_DECISION,
10523 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010524 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10525 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10526 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010527 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010528 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010529 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010530 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010531 NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
10532 NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
10533 NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
10534 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
10535 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
10536 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
10537 NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010538 };
10539
Thierry Strudel3d639192016-09-09 11:52:26 -070010540 size_t result_keys_cnt =
10541 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10542
10543 Vector<int32_t> available_result_keys;
10544 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10545 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10546 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10547 }
10548 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10549 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10550 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10551 }
10552 if (supportedFaceDetectMode == 1) {
10553 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10554 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10555 } else if ((supportedFaceDetectMode == 2) ||
10556 (supportedFaceDetectMode == 3)) {
10557 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10558 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10559 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010560#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010561 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010562 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10563 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10564 }
10565#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010566
10567 if (gExposeEnableZslKey) {
10568 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010569 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010570 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10571 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010572 }
10573
Thierry Strudel3d639192016-09-09 11:52:26 -070010574 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10575 available_result_keys.array(), available_result_keys.size());
10576
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010577 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010578 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10579 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10580 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10581 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10582 ANDROID_SCALER_CROPPING_TYPE,
10583 ANDROID_SYNC_MAX_LATENCY,
10584 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10585 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10586 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10587 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10588 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10589 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10590 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10591 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10592 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10593 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10594 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10595 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10596 ANDROID_LENS_FACING,
10597 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10598 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10599 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10600 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10601 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10602 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10603 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10604 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10605 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10606 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10607 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10608 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10609 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10610 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10611 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10612 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10613 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10614 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10615 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10616 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010617 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010618 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10619 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10620 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10621 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10622 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10623 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10624 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10625 ANDROID_CONTROL_AVAILABLE_MODES,
10626 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10627 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10628 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10629 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010630 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10631#ifndef USE_HAL_3_3
10632 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10633 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10634#endif
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010635 QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
10636 QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
10637 QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10638 QCAMERA3_SHARPNESS_RANGE,
10639 QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
10640 QCAMERA3_STATS_BSGC_AVAILABLE
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010641 };
10642
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010643 available_characteristics_keys.appendArray(characteristics_keys_basic,
10644 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10645#ifndef USE_HAL_3_3
10646 if (hasBlackRegions) {
10647 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10648 }
10649#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010650
10651 if (0 <= indexPD) {
10652 int32_t depthKeys[] = {
10653 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10654 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10655 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10656 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10657 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10658 };
10659 available_characteristics_keys.appendArray(depthKeys,
10660 sizeof(depthKeys) / sizeof(depthKeys[0]));
10661 }
10662
Thierry Strudel3d639192016-09-09 11:52:26 -070010663 /*available stall durations depend on the hw + sw and will be different for different devices */
10664 /*have to add for raw after implementation*/
10665 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10666 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10667
10668 Vector<int64_t> available_stall_durations;
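    // Each entry below is a 4-tuple: (format, width, height, stall duration in ns).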
10669 for (uint32_t j = 0; j < stall_formats_count; j++) {
10670 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10671 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10672 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10673 available_stall_durations.add(stall_formats[j]);
10674 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10675 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10676 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10677 }
10678 } else {
10679 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10680 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10681 available_stall_durations.add(stall_formats[j]);
10682 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10683 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10684 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10685 }
10686 }
10687 }
10688 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10689 available_stall_durations.array(),
10690 available_stall_durations.size());
10691
10692 //QCAMERA3_OPAQUE_RAW
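    // Select the opaque RAW pixel format (QCOM legacy vs. MIPI packed) and the bits
    // per pixel based on the sensor white level.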
10693 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10694 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10695 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10696 case LEGACY_RAW:
10697 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10698 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10699 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10700 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10701 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10702 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10703 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10704 break;
10705 case MIPI_RAW:
10706 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10707 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10708 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10709 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10710 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10711 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10712 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10713 break;
10714 default:
10715 LOGE("unknown opaque_raw_format %d",
10716 gCamCapability[cameraId]->opaque_raw_fmt);
10717 break;
10718 }
10719 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10720
10721 Vector<int32_t> strides;
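    // QCAMERA3_OPAQUE_RAW_STRIDES entries are packed as (width, height, stride) triples.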
10722 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10723 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10724 cam_stream_buf_plane_info_t buf_planes;
10725 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10726 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10727 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10728 &gCamCapability[cameraId]->padding_info, &buf_planes);
10729 strides.add(buf_planes.plane_info.mp[0].stride);
10730 }
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010731
10732 if (!strides.isEmpty()) {
10733 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10734 strides.size());
10735 available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
10736 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010737
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010738 //TBD: remove the following line once backend advertises zzHDR in feature mask
10739 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010740 //Video HDR default
10741 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10742 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010743 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010744 int32_t vhdr_mode[] = {
10745 QCAMERA3_VIDEO_HDR_MODE_OFF,
10746 QCAMERA3_VIDEO_HDR_MODE_ON};
10747
10748 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10749 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10750 vhdr_mode, vhdr_mode_count);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010751 available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010752 }
10753
Thierry Strudel3d639192016-09-09 11:52:26 -070010754 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10755 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10756 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10757
10758 uint8_t isMonoOnly =
10759 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10760 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10761 &isMonoOnly, 1);
10762
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010763#ifndef USE_HAL_3_3
10764 Vector<int32_t> opaque_size;
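    // ANDROID_SENSOR_OPAQUE_RAW_SIZE entries are packed as (width, height, frame length
    // in bytes) triples.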
10765 for (size_t j = 0; j < scalar_formats_count; j++) {
10766 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10767 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10768 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10769 cam_stream_buf_plane_info_t buf_planes;
10770
10771 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10772 &gCamCapability[cameraId]->padding_info, &buf_planes);
10773
10774 if (rc == 0) {
10775 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10776 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10777 opaque_size.add(buf_planes.plane_info.frame_len);
10778                } else {
10779 LOGE("raw frame calculation failed!");
10780 }
10781 }
10782 }
10783 }
10784
10785 if ((opaque_size.size() > 0) &&
10786 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10787 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10788 else
10789        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10790#endif
10791
Thierry Strudel04e026f2016-10-10 11:27:36 -070010792 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10793 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10794 size = 0;
10795 count = CAM_IR_MODE_MAX;
10796 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10797 for (size_t i = 0; i < count; i++) {
10798 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10799 gCamCapability[cameraId]->supported_ir_modes[i]);
10800 if (NAME_NOT_FOUND != val) {
10801 avail_ir_modes[size] = (int32_t)val;
10802 size++;
10803 }
10804 }
10805 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10806 avail_ir_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010807 available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010808 }
10809
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010810 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10811 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10812 size = 0;
10813 count = CAM_AEC_CONVERGENCE_MAX;
10814 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10815 for (size_t i = 0; i < count; i++) {
10816 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10817 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10818 if (NAME_NOT_FOUND != val) {
10819 available_instant_aec_modes[size] = (int32_t)val;
10820 size++;
10821 }
10822 }
10823 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10824 available_instant_aec_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010825 available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010826 }
10827
Thierry Strudel54dc9782017-02-15 12:12:10 -080010828 int32_t sharpness_range[] = {
10829 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10830 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10831 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10832
10833 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10834 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10835 size = 0;
10836 count = CAM_BINNING_CORRECTION_MODE_MAX;
10837 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10838 for (size_t i = 0; i < count; i++) {
10839 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10840 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10841 gCamCapability[cameraId]->supported_binning_modes[i]);
10842 if (NAME_NOT_FOUND != val) {
10843 avail_binning_modes[size] = (int32_t)val;
10844 size++;
10845 }
10846 }
10847 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10848 avail_binning_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010849 available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
Thierry Strudel54dc9782017-02-15 12:12:10 -080010850 }
10851
10852 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10853 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10854 size = 0;
10855 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10856 for (size_t i = 0; i < count; i++) {
10857 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10858 gCamCapability[cameraId]->supported_aec_modes[i]);
10859 if (NAME_NOT_FOUND != val)
10860 available_aec_modes[size++] = val;
10861 }
10862 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10863 available_aec_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010864 available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
Thierry Strudel54dc9782017-02-15 12:12:10 -080010865 }
10866
10867 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10868 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10869 size = 0;
10870 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10871 for (size_t i = 0; i < count; i++) {
10872 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10873 gCamCapability[cameraId]->supported_iso_modes[i]);
10874 if (NAME_NOT_FOUND != val)
10875 available_iso_modes[size++] = val;
10876 }
10877 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10878 available_iso_modes, size);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010879 available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
Thierry Strudel54dc9782017-02-15 12:12:10 -080010880 }
10881
10882 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010883 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010884 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10885 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10886 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10887
10888 int32_t available_saturation_range[4];
10889 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10890 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10891 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10892 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10893 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10894 available_saturation_range, 4);
10895
10896 uint8_t is_hdr_values[2];
10897 is_hdr_values[0] = 0;
10898 is_hdr_values[1] = 1;
10899 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10900 is_hdr_values, 2);
10901
10902 float is_hdr_confidence_range[2];
10903 is_hdr_confidence_range[0] = 0.0;
10904 is_hdr_confidence_range[1] = 1.0;
10905 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10906 is_hdr_confidence_range, 2);
10907
Emilian Peev0a972ef2017-03-16 10:25:53 +000010908 size_t eepromLength = strnlen(
10909 reinterpret_cast<const char *>(
10910 gCamCapability[cameraId]->eeprom_version_info),
10911 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10912 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010913 char easelInfo[] = ",E:N";
10914 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10915 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10916 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010917 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010918 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010919 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010920 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010921 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10922 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010923 available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
Emilian Peev0a972ef2017-03-16 10:25:53 +000010924 }
10925
Emilian Peeve91e9ae2017-09-18 14:40:55 +010010926 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
10927 available_characteristics_keys.array(),
10928 available_characteristics_keys.size());
10929
Thierry Strudel3d639192016-09-09 11:52:26 -070010930 gStaticMetadata[cameraId] = staticInfo.release();
10931 return rc;
10932}
10933
10934/*===========================================================================
10935 * FUNCTION : makeTable
10936 *
10937 * DESCRIPTION: make a flattened table of (width, height) pairs
10938 *
10939 * PARAMETERS :
10940 *   @dimTable : input dimension table; @size : number of valid entries; @max_size : table capacity
10941 *   @sizeTable : output array of flattened width/height pairs
10942 *==========================================================================*/
10943void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10944 size_t max_size, int32_t *sizeTable)
10945{
10946 size_t j = 0;
10947 if (size > max_size) {
10948 size = max_size;
10949 }
10950 for (size_t i = 0; i < size; i++) {
10951 sizeTable[j] = dimTable[i].width;
10952 sizeTable[j+1] = dimTable[i].height;
10953 j+=2;
10954 }
10955}
10956
10957/*===========================================================================
10958 * FUNCTION : makeFPSTable
10959 *
10960 * DESCRIPTION: make a table of fps ranges
10961 * DESCRIPTION: make a flattened table of (min_fps, max_fps) pairs
10962 *
10963 * PARAMETERS :
10964 *   @fpsTable : input fps ranges; @size : valid entries; @max_size : capacity; @fpsRangesTable : output pairs
10965void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10966 size_t max_size, int32_t *fpsRangesTable)
10967{
10968 size_t j = 0;
10969 if (size > max_size) {
10970 size = max_size;
10971 }
10972 for (size_t i = 0; i < size; i++) {
10973 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10974 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10975 j+=2;
10976 }
10977}
10978
10979/*===========================================================================
10980 * FUNCTION : makeOverridesList
10981 *
10982 * DESCRIPTION: make a list of scene mode overrides
10983 *
10984 * PARAMETERS :
10985 * PARAMETERS :
10986 *   @overridesTable : per-scene-mode overrides from the backend; @size : valid entries; @max_size : capacity
10987 *   @overridesList : output (ae, awb, af) triples; @supported_indexes : fwk scene indexes; @camera_id : camera Id
10988void QCamera3HardwareInterface::makeOverridesList(
10989 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10990 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10991{
10992    /* The daemon provides a list of overrides for all scene modes.
10993       However, we should send the framework only the overrides for the
10994       scene modes that the framework supports. */
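    // Each output entry is a 3-tuple: (ae_mode, awb_mode, af_mode) per supported scene mode.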
10995 size_t j = 0;
10996 if (size > max_size) {
10997 size = max_size;
10998 }
10999 size_t focus_count = CAM_FOCUS_MODE_MAX;
11000 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
11001 focus_count);
11002 for (size_t i = 0; i < size; i++) {
11003 bool supt = false;
11004 size_t index = supported_indexes[i];
11005 overridesList[j] = gCamCapability[camera_id]->flash_available ?
11006 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
11007 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
11008 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11009 overridesTable[index].awb_mode);
11010 if (NAME_NOT_FOUND != val) {
11011 overridesList[j+1] = (uint8_t)val;
11012 }
11013 uint8_t focus_override = overridesTable[index].af_mode;
11014 for (size_t k = 0; k < focus_count; k++) {
11015 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
11016 supt = true;
11017 break;
11018 }
11019 }
11020 if (supt) {
11021 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11022 focus_override);
11023 if (NAME_NOT_FOUND != val) {
11024 overridesList[j+2] = (uint8_t)val;
11025 }
11026 } else {
11027 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
11028 }
11029 j+=3;
11030 }
11031}
11032
11033/*===========================================================================
11034 * FUNCTION : filterJpegSizes
11035 *
11036 * DESCRIPTION: Returns the supported JPEG sizes: the processed sizes that are no
11037 * smaller than the active array size divided by downscale_factor
11038 *
11039 * PARAMETERS : @jpegSizes : output size array; @processedSizes : input (w,h) pairs;
11040 *   @processedSizesCnt, @maxCount, @active_array_size, @downscale_factor (0 treated as 1)
11041 * RETURN : length of jpegSizes array
11042 *==========================================================================*/
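// For example (hypothetical numbers): with a 4000x3000 active array and downscale_factor 4,
// only processed sizes of at least 1000x750 are reported as supported JPEG sizes.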
11043
11044size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11045 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11046 uint8_t downscale_factor)
11047{
11048 if (0 == downscale_factor) {
11049 downscale_factor = 1;
11050 }
11051
11052 int32_t min_width = active_array_size.width / downscale_factor;
11053 int32_t min_height = active_array_size.height / downscale_factor;
11054 size_t jpegSizesCnt = 0;
11055 if (processedSizesCnt > maxCount) {
11056 processedSizesCnt = maxCount;
11057 }
11058 for (size_t i = 0; i < processedSizesCnt; i+=2) {
11059 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11060 jpegSizes[jpegSizesCnt] = processedSizes[i];
11061 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11062 jpegSizesCnt += 2;
11063 }
11064 }
11065 return jpegSizesCnt;
11066}
11067
11068/*===========================================================================
11069 * FUNCTION : computeNoiseModelEntryS
11070 *
11071 * DESCRIPTION: function to map a given sensitivity to the S noise
11072 * model parameters in the DNG noise model.
11073 *
11074 * PARAMETERS : sens : the sensor sensitivity
11075 *
11076 * RETURN : S (sensor amplification) noise
11077 *
11078 *==========================================================================*/
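// Linear model: S(sens) = gradient_S * sens + offset_S, clamped to be non-negative.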
11079double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11080 double s = gCamCapability[mCameraId]->gradient_S * sens +
11081 gCamCapability[mCameraId]->offset_S;
11082 return ((s < 0.0) ? 0.0 : s);
11083}
11084
11085/*===========================================================================
11086 * FUNCTION : computeNoiseModelEntryO
11087 *
11088 * DESCRIPTION: function to map a given sensitivity to the O noise
11089 * model parameters in the DNG noise model.
11090 *
11091 * PARAMETERS : sens : the sensor sensitivity
11092 *
11093 * RETURN : O (sensor readout) noise
11094 *
11095 *==========================================================================*/
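// Quadratic model: O(sens) = gradient_O * sens^2 + offset_O * digital_gain^2, where
// digital_gain = max(1.0, sens / max_analog_sensitivity); clamped to be non-negative.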
11096double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11097 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11098 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11099 1.0 : (1.0 * sens / max_analog_sens);
11100 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11101 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11102 return ((o < 0.0) ? 0.0 : o);
11103}
11104
11105/*===========================================================================
11106 * FUNCTION : getSensorSensitivity
11107 *
11108 * DESCRIPTION: convert iso_mode to an integer value
11109 *
11110 * PARAMETERS : iso_mode : the iso_mode supported by sensor
11111 *
11112 * RETURN : sensitivity supported by sensor
11113 *
11114 *==========================================================================*/
11115int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11116{
11117 int32_t sensitivity;
11118
11119 switch (iso_mode) {
11120 case CAM_ISO_MODE_100:
11121 sensitivity = 100;
11122 break;
11123 case CAM_ISO_MODE_200:
11124 sensitivity = 200;
11125 break;
11126 case CAM_ISO_MODE_400:
11127 sensitivity = 400;
11128 break;
11129 case CAM_ISO_MODE_800:
11130 sensitivity = 800;
11131 break;
11132 case CAM_ISO_MODE_1600:
11133 sensitivity = 1600;
11134 break;
11135 default:
11136 sensitivity = -1;
11137 break;
11138 }
11139 return sensitivity;
11140}
11141
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011142int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011143 if (gEaselManagerClient == nullptr) {
11144 gEaselManagerClient = EaselManagerClient::create();
11145 if (gEaselManagerClient == nullptr) {
11146 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11147 return -ENODEV;
11148 }
11149 }
11150
11151 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011152 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11153 // to connect to Easel.
11154 bool doNotpowerOnEasel =
11155 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11156
11157 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011158 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11159 return OK;
11160 }
11161
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011162 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011163 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011164 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011165 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11166 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011167 return res;
11168 }
11169
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011170 EaselManagerClientOpened = true;
11171
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011172 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011173 if (res != OK) {
11174 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11175 }
11176
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011177 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011178 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011179
11180 // Expose enableZsl key only when HDR+ mode is enabled.
11181 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011182 }
11183
11184 return OK;
11185}
11186
Thierry Strudel3d639192016-09-09 11:52:26 -070011187/*===========================================================================
11188 * FUNCTION : getCamInfo
11189 *
11190 * DESCRIPTION: query camera capabilities
11191 *
11192 * PARAMETERS :
11193 * @cameraId : camera Id
11194 * @info : camera info struct to be filled in with camera capabilities
11195 *
11196 * RETURN : int type of status
11197 * NO_ERROR -- success
11198 * none-zero failure code
11199 *==========================================================================*/
11200int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11201 struct camera_info *info)
11202{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011203 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011204 int rc = 0;
11205
11206 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011207
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011208 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011209 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011210 rc = initHdrPlusClientLocked();
11211 if (rc != OK) {
11212 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11213 pthread_mutex_unlock(&gCamLock);
11214 return rc;
11215 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011216 }
11217
Thierry Strudel3d639192016-09-09 11:52:26 -070011218 if (NULL == gCamCapability[cameraId]) {
11219 rc = initCapabilities(cameraId);
11220 if (rc < 0) {
11221 pthread_mutex_unlock(&gCamLock);
11222 return rc;
11223 }
11224 }
11225
11226 if (NULL == gStaticMetadata[cameraId]) {
11227 rc = initStaticMetadata(cameraId);
11228 if (rc < 0) {
11229 pthread_mutex_unlock(&gCamLock);
11230 return rc;
11231 }
11232 }
11233
11234 switch(gCamCapability[cameraId]->position) {
11235 case CAM_POSITION_BACK:
11236 case CAM_POSITION_BACK_AUX:
11237 info->facing = CAMERA_FACING_BACK;
11238 break;
11239
11240 case CAM_POSITION_FRONT:
11241 case CAM_POSITION_FRONT_AUX:
11242 info->facing = CAMERA_FACING_FRONT;
11243 break;
11244
11245 default:
11246 LOGE("Unknown position type %d for camera id:%d",
11247 gCamCapability[cameraId]->position, cameraId);
11248 rc = -1;
11249 break;
11250 }
11251
11252
11253 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011254#ifndef USE_HAL_3_3
11255 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11256#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011257 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011258#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011259 info->static_camera_characteristics = gStaticMetadata[cameraId];
11260
11261 //For now assume both cameras can operate independently.
11262 info->conflicting_devices = NULL;
11263 info->conflicting_devices_length = 0;
11264
11265 //resource cost is 100 * MIN(1.0, m/M),
11266 //where m is throughput requirement with maximum stream configuration
11267 //and M is CPP maximum throughput.
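    // Here m = MAX_PROCESSED_STREAMS * active_array_width * active_array_height * max_fps
    // and M = max_pixel_bandwidth, as computed below.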
11268 float max_fps = 0.0;
11269 for (uint32_t i = 0;
11270 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11271 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11272 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11273 }
11274 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11275 gCamCapability[cameraId]->active_array_size.width *
11276 gCamCapability[cameraId]->active_array_size.height * max_fps /
11277 gCamCapability[cameraId]->max_pixel_bandwidth;
11278 info->resource_cost = 100 * MIN(1.0, ratio);
11279 LOGI("camera %d resource cost is %d", cameraId,
11280 info->resource_cost);
11281
11282 pthread_mutex_unlock(&gCamLock);
11283 return rc;
11284}
11285
11286/*===========================================================================
11287 * FUNCTION : translateCapabilityToMetadata
11288 *
11289 * DESCRIPTION: translate the capability into camera_metadata_t
11290 *
11291 * PARAMETERS : type of the request
11292 *
11293 *
11294 * RETURN : success: camera_metadata_t*
11295 * failure: NULL
11296 *
11297 *==========================================================================*/
11298camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11299{
11300 if (mDefaultMetadata[type] != NULL) {
11301 return mDefaultMetadata[type];
11302 }
11303 //first time we are handling this request
11304 //fill up the metadata structure using the wrapper class
11305 CameraMetadata settings;
11306 //translate from cam_capability_t to camera_metadata_tag_t
11307 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11308 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11309 int32_t defaultRequestID = 0;
11310 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11311
11312 /* OIS disable */
11313 char ois_prop[PROPERTY_VALUE_MAX];
11314 memset(ois_prop, 0, sizeof(ois_prop));
11315 property_get("persist.camera.ois.disable", ois_prop, "0");
11316 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11317
11318 /* Force video to use OIS */
11319 char videoOisProp[PROPERTY_VALUE_MAX];
11320 memset(videoOisProp, 0, sizeof(videoOisProp));
11321 property_get("persist.camera.ois.video", videoOisProp, "1");
11322 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011323
11324 // Hybrid AE enable/disable
11325 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11326 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11327 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011328 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011329
Thierry Strudel3d639192016-09-09 11:52:26 -070011330 uint8_t controlIntent = 0;
11331 uint8_t focusMode;
11332 uint8_t vsMode;
11333 uint8_t optStabMode;
11334 uint8_t cacMode;
11335 uint8_t edge_mode;
11336 uint8_t noise_red_mode;
11337 uint8_t tonemap_mode;
11338 bool highQualityModeEntryAvailable = FALSE;
11339 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011340 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011341 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11342 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011343 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011344 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011345 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011346
Thierry Strudel3d639192016-09-09 11:52:26 -070011347 switch (type) {
11348 case CAMERA3_TEMPLATE_PREVIEW:
11349 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11350 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11351 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11352 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11353 edge_mode = ANDROID_EDGE_MODE_FAST;
11354 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11355 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11356 break;
11357 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11358 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11359 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11360 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11361 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11362 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11363 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11364 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11365 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11366 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11367 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11368 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11369 highQualityModeEntryAvailable = TRUE;
11370 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11371 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11372 fastModeEntryAvailable = TRUE;
11373 }
11374 }
11375 if (highQualityModeEntryAvailable) {
11376 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11377 } else if (fastModeEntryAvailable) {
11378 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11379 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011380 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11381 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11382 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011383 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011384 break;
11385 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11386 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11387 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11388 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011389 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11390 edge_mode = ANDROID_EDGE_MODE_FAST;
11391 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11392 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11393 if (forceVideoOis)
11394 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11395 break;
11396 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11397 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11398 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11399 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011400 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11401 edge_mode = ANDROID_EDGE_MODE_FAST;
11402 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11403 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11404 if (forceVideoOis)
11405 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11406 break;
11407 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11408 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11409 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11410 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11411 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11412 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11413 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11414 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11415 break;
11416 case CAMERA3_TEMPLATE_MANUAL:
11417 edge_mode = ANDROID_EDGE_MODE_FAST;
11418 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11419 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11420 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11421 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11422 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11423 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11424 break;
11425 default:
11426 edge_mode = ANDROID_EDGE_MODE_FAST;
11427 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11428 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11429 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11430 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11431 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11432 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11433 break;
11434 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011435 // Set CAC to OFF if the underlying device doesn't support it
11436 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11437 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11438 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011439 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11440 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11441 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11442 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11443 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11444 }
11445 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011446 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011447 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011448
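// If the sensor advertises exactly one OIS mode, force that mode; the
// persist.camera.ois.disable property unconditionally forces OIS off.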
11449 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11450 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11451 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11452 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11453 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11454 || ois_disable)
11455 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11456 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011457 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011458
11459 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11460 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11461
11462 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11463 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11464
11465 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11466 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11467
11468 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11469 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11470
11471 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11472 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11473
11474 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11475 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11476
11477 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11478 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11479
11480 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11481 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11482
11483 /*flash*/
11484 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11485 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11486
11487 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11488 settings.update(ANDROID_FLASH_FIRING_POWER,
11489 &flashFiringLevel, 1);
11490
11491 /* lens */
11492 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11493 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11494
11495 if (gCamCapability[mCameraId]->filter_densities_count) {
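// ANDROID_LENS_FILTER_DENSITY holds a single value; default to the first supported density.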
11496 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11497 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 1);
11499 }
11500
11501 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11502 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11503
Thierry Strudel3d639192016-09-09 11:52:26 -070011504 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11505 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11506
11507 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11508 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11509
11510 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11511 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11512
11513 /* face detection (default to OFF) */
11514 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11515 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11516
Thierry Strudel54dc9782017-02-15 12:12:10 -080011517 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11518 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011519
11520 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11521 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11522
11523 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11524 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11525
Thierry Strudel3d639192016-09-09 11:52:26 -070011526
11527 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11528 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11529
11530 /* Exposure time(Update the Min Exposure Time)*/
11531 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11532 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11533
11534 /* frame duration */
11535 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11536 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11537
11538 /* sensitivity */
11539 static const int32_t default_sensitivity = 100;
11540 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011541#ifndef USE_HAL_3_3
11542 static const int32_t default_isp_sensitivity =
11543 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11544 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11545#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011546
11547 /*edge mode*/
11548 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11549
11550 /*noise reduction mode*/
11551 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11552
11553 /*color correction mode*/
11554 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11555 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11556
11557 /*tonemap mode*/
11558 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11559
11560 int32_t scaler_crop_region[4];
11561 scaler_crop_region[0] = 0;
11562 scaler_crop_region[1] = 0;
11563 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11564 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11565 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11566
11567 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11568 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11569
11570 /*focus distance*/
11571 float focus_distance = 0.0;
11572 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11573
11574 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011575 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011576 float max_range = 0.0;
11577 float max_fixed_fps = 0.0;
11578 int32_t fps_range[2] = {0, 0};
11579 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11580 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011581 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11582 TEMPLATE_MAX_PREVIEW_FPS) {
11583 continue;
11584 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011585 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11586 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11587 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11588 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11589 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11590 if (range > max_range) {
11591 fps_range[0] =
11592 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11593 fps_range[1] =
11594 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11595 max_range = range;
11596 }
11597 } else {
11598 if (range < 0.01 && max_fixed_fps <
11599 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11600 fps_range[0] =
11601 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11602 fps_range[1] =
11603 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11604 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11605 }
11606 }
11607 }
11608 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11609
11610 /*precapture trigger*/
11611 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11612 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11613
11614 /*af trigger*/
11615 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11616 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11617
11618 /* ae & af regions */
11619 int32_t active_region[] = {
11620 gCamCapability[mCameraId]->active_array_size.left,
11621 gCamCapability[mCameraId]->active_array_size.top,
11622 gCamCapability[mCameraId]->active_array_size.left +
11623 gCamCapability[mCameraId]->active_array_size.width,
11624 gCamCapability[mCameraId]->active_array_size.top +
11625 gCamCapability[mCameraId]->active_array_size.height,
11626 0};
11627 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11628 sizeof(active_region) / sizeof(active_region[0]));
11629 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11630 sizeof(active_region) / sizeof(active_region[0]));
11631
11632 /* black level lock */
11633 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11634 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11635
Thierry Strudel3d639192016-09-09 11:52:26 -070011636 //special defaults for manual template
11637 if (type == CAMERA3_TEMPLATE_MANUAL) {
11638 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11639 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11640
11641 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11642 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11643
11644 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11645 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11646
11647 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11648 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11649
11650 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11651 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11652
11653 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11654 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11655 }
11656
11657
11658 /* TNR
11659 * We use this location to decide for which templates TNR is enabled.
11660 * TNR is turned on if either the preview or the video stream requires it.
11661 * This is not to be confused with per-stream linking; that decision is
11662 * still made per session and is handled as part of stream configuration.
11663 */
11664 uint8_t tnr_enable = 0;
11665
11666 if (m_bTnrPreview || m_bTnrVideo) {
11667
11668 switch (type) {
11669 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11670 tnr_enable = 1;
11671 break;
11672
11673 default:
11674 tnr_enable = 0;
11675 break;
11676 }
11677
11678 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11679 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11680 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11681
11682 LOGD("TNR:%d with process plate %d for template:%d",
11683 tnr_enable, tnr_process_type, type);
11684 }
11685
11686 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011687 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011688 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11689
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011690 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011691 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11692
Shuzhen Wang920ea402017-05-03 08:49:39 -070011693 uint8_t related_camera_id = mCameraId;
11694 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011695
11696 /* CDS default */
11697 char prop[PROPERTY_VALUE_MAX];
11698 memset(prop, 0, sizeof(prop));
11699 property_get("persist.camera.CDS", prop, "Auto");
11700 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11701 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11702 if (CAM_CDS_MODE_MAX == cds_mode) {
11703 cds_mode = CAM_CDS_MODE_AUTO;
11704 }
11705
11706 /* Disabling CDS in templates which have TNR enabled*/
11707 if (tnr_enable)
11708 cds_mode = CAM_CDS_MODE_OFF;
11709
11710 int32_t mode = cds_mode;
11711 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011712
Thierry Strudel269c81a2016-10-12 12:13:59 -070011713 /* Manual Convergence AEC Speed is disabled by default*/
11714 float default_aec_speed = 0;
11715 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11716
11717 /* Manual Convergence AWB Speed is disabled by default*/
11718 float default_awb_speed = 0;
11719 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11720
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011721 // Set instant AEC to normal convergence by default
11722 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11723 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11724
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011725 if (gExposeEnableZslKey) {
11726 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011727 int32_t postview = 0;
11728 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011729 int32_t continuousZslCapture = 0;
11730 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011731 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11732 // CAMERA3_TEMPLATE_PREVIEW.
11733 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11734 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011735 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11736
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011737 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11738 // hybrid ae is enabled for 3rd party app HDR+.
11739 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11740 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11741 hybrid_ae = 1;
11742 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011743 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011744 /* hybrid ae */
11745 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011746
Thierry Strudel3d639192016-09-09 11:52:26 -070011747 mDefaultMetadata[type] = settings.release();
11748
11749 return mDefaultMetadata[type];
11750}
11751
11752/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011753 * FUNCTION : getExpectedFrameDuration
11754 *
11755 * DESCRIPTION: Extract the maximum expected frame duration from the exposure
11756 * time and/or frame duration settings
11757 *
11758 * PARAMETERS :
11759 * @request : request settings
11760 * @frameDuration : The maximum frame duration in nanoseconds
11761 *
11762 * RETURN : None
11763 *==========================================================================*/
11764void QCamera3HardwareInterface::getExpectedFrameDuration(
11765 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11766 if (nullptr == frameDuration) {
11767 return;
11768 }
11769
11770 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11771 find_camera_metadata_ro_entry(request,
11772 ANDROID_SENSOR_EXPOSURE_TIME,
11773 &e);
11774 if (e.count > 0) {
11775 *frameDuration = e.data.i64[0];
11776 }
11777 find_camera_metadata_ro_entry(request,
11778 ANDROID_SENSOR_FRAME_DURATION,
11779 &e);
11780 if (e.count > 0) {
11781 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11782 }
11783}
11784
11785/*===========================================================================
11786 * FUNCTION : calculateMaxExpectedDuration
11787 *
11788 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11789 * current camera settings.
11790 *
11791 * PARAMETERS :
11792 * @request : request settings
11793 *
11794 * RETURN : Expected frame duration in nanoseconds.
11795 *==========================================================================*/
11796nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11797 const camera_metadata_t *request) {
11798 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11799 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11800 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11801 if (e.count == 0) {
11802 return maxExpectedDuration;
11803 }
11804
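// In fully manual mode (CONTROL_MODE_OFF) the expectation comes straight from the manual
// exposure/frame duration. Only CONTROL_MODE_AUTO falls through to the AE mode check
// below; any other mode keeps the default expectation.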
11805 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11806 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11807 }
11808
11809 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11810 return maxExpectedDuration;
11811 }
11812
11813 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11814 if (e.count == 0) {
11815 return maxExpectedDuration;
11816 }
11817
11818 switch (e.data.u8[0]) {
11819 case ANDROID_CONTROL_AE_MODE_OFF:
11820 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11821 break;
11822 default:
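// AE is active: the longest expected frame is bounded by the minimum fps of the
// requested AE target fps range.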
11823 find_camera_metadata_ro_entry(request,
11824 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11825 &e);
11826 if (e.count > 1) {
11827 maxExpectedDuration = 1e9 / e.data.i32[0];
11828 }
11829 break;
11830 }
11831
11832 return maxExpectedDuration;
11833}
11834
11835/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011836 * FUNCTION : setFrameParameters
11837 *
11838 * DESCRIPTION: set parameters per frame as requested in the metadata from
11839 * framework
11840 *
11841 * PARAMETERS :
11842 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011843 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011844 * @blob_request: Whether this request is a blob request or not
11845 *
11846 * RETURN : success: NO_ERROR
11847 * failure:
11848 *==========================================================================*/
11849int QCamera3HardwareInterface::setFrameParameters(
11850 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011851 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011852 int blob_request,
11853 uint32_t snapshotStreamId)
11854{
11855 /*translate from camera_metadata_t type to parm_type_t*/
11856 int rc = 0;
11857 int32_t hal_version = CAM_HAL_V3;
11858
11859 clear_metadata_buffer(mParameters);
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11861 LOGE("Failed to set hal version in the parameters");
11862 return BAD_VALUE;
11863 }
11864
11865 /*we need to update the frame number in the parameters*/
11866 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11867 request->frame_number)) {
11868 LOGE("Failed to set the frame number in the parameters");
11869 return BAD_VALUE;
11870 }
11871
11872 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011874 LOGE("Failed to set stream type mask in the parameters");
11875 return BAD_VALUE;
11876 }
11877
11878 if (mUpdateDebugLevel) {
11879 uint32_t dummyDebugLevel = 0;
11880 /* The value of dummyDebugLevel is irrelevant. On
11881 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read. */
11882 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11883 dummyDebugLevel)) {
11884 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11885 return BAD_VALUE;
11886 }
11887 mUpdateDebugLevel = false;
11888 }
11889
11890 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011891 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011892 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11893 if (blob_request)
11894 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11895 }
11896
11897 return rc;
11898}
11899
11900/*===========================================================================
11901 * FUNCTION : setReprocParameters
11902 *
11903 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11904 * return it.
11905 *
11906 * PARAMETERS :
11907 * @request : request that needs to be serviced
11908 *
11909 * RETURN : success: NO_ERROR
11910 * failure:
11911 *==========================================================================*/
11912int32_t QCamera3HardwareInterface::setReprocParameters(
11913 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11914 uint32_t snapshotStreamId)
11915{
11916 /*translate from camera_metadata_t type to parm_type_t*/
11917 int rc = 0;
11918
11919 if (NULL == request->settings){
11920 LOGE("Reprocess settings cannot be NULL");
11921 return BAD_VALUE;
11922 }
11923
11924 if (NULL == reprocParam) {
11925 LOGE("Invalid reprocessing metadata buffer");
11926 return BAD_VALUE;
11927 }
11928 clear_metadata_buffer(reprocParam);
11929
11930 /*we need to update the frame number in the parameters*/
11931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11932 request->frame_number)) {
11933 LOGE("Failed to set the frame number in the parameters");
11934 return BAD_VALUE;
11935 }
11936
11937 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11938 if (rc < 0) {
11939 LOGE("Failed to translate reproc request");
11940 return rc;
11941 }
11942
11943 CameraMetadata frame_settings;
11944 frame_settings = request->settings;
11945 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11946 frame_settings.exists(QCAMERA3_CROP_REPROCESS) && frame_settings.exists(QCAMERA3_CROP_ROI_MAP_REPROCESS)) {
11947 int32_t *crop_count =
11948 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11949 int32_t *crop_data =
11950 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11951 int32_t *roi_map =
11952 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11953 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11954 cam_crop_data_t crop_meta;
11955 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11956 crop_meta.num_of_streams = 1;
11957 crop_meta.crop_info[0].crop.left = crop_data[0];
11958 crop_meta.crop_info[0].crop.top = crop_data[1];
11959 crop_meta.crop_info[0].crop.width = crop_data[2];
11960 crop_meta.crop_info[0].crop.height = crop_data[3];
11961
11962 crop_meta.crop_info[0].roi_map.left =
11963 roi_map[0];
11964 crop_meta.crop_info[0].roi_map.top =
11965 roi_map[1];
11966 crop_meta.crop_info[0].roi_map.width =
11967 roi_map[2];
11968 crop_meta.crop_info[0].roi_map.height =
11969 roi_map[3];
11970
11971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11972 rc = BAD_VALUE;
11973 }
11974 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11975 request->input_buffer->stream,
11976 crop_meta.crop_info[0].crop.left,
11977 crop_meta.crop_info[0].crop.top,
11978 crop_meta.crop_info[0].crop.width,
11979 crop_meta.crop_info[0].crop.height);
11980 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11981 request->input_buffer->stream,
11982 crop_meta.crop_info[0].roi_map.left,
11983 crop_meta.crop_info[0].roi_map.top,
11984 crop_meta.crop_info[0].roi_map.width,
11985 crop_meta.crop_info[0].roi_map.height);
11986 } else {
11987 LOGE("Invalid reprocess crop count %d!", *crop_count);
11988 }
11989 } else {
11990 LOGE("No crop data from matching output stream");
11991 }
11992
11993 /* These settings are not needed for regular requests so handle them specially for
11994 reprocess requests; information needed for EXIF tags */
11995 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11996 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11997 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11998 if (NAME_NOT_FOUND != val) {
11999 uint32_t flashMode = (uint32_t)val;
12000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
12001 rc = BAD_VALUE;
12002 }
12003 } else {
12004 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
12005 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12006 }
12007 } else {
12008 LOGH("No flash mode in reprocess settings");
12009 }
12010
12011 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
12012 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
12013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
12014 rc = BAD_VALUE;
12015 }
12016 } else {
12017 LOGH("No flash state in reprocess settings");
12018 }
12019
12020 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
12021 uint8_t *reprocessFlags =
12022 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
12024 *reprocessFlags)) {
12025 rc = BAD_VALUE;
12026 }
12027 }
12028
Thierry Strudel54dc9782017-02-15 12:12:10 -080012029 // Add exif debug data to internal metadata
12030 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
12031 mm_jpeg_debug_exif_params_t *debug_params =
12032 (mm_jpeg_debug_exif_params_t *)frame_settings.find
12033 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12034 // AE
12035 if (debug_params->ae_debug_params_valid == TRUE) {
12036 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12037 debug_params->ae_debug_params);
12038 }
12039 // AWB
12040 if (debug_params->awb_debug_params_valid == TRUE) {
12041 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12042 debug_params->awb_debug_params);
12043 }
12044 // AF
12045 if (debug_params->af_debug_params_valid == TRUE) {
12046 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12047 debug_params->af_debug_params);
12048 }
12049 // ASD
12050 if (debug_params->asd_debug_params_valid == TRUE) {
12051 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12052 debug_params->asd_debug_params);
12053 }
12054 // Stats
12055 if (debug_params->stats_debug_params_valid == TRUE) {
12056 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12057 debug_params->stats_debug_params);
12058 }
12059 // BE Stats
12060 if (debug_params->bestats_debug_params_valid == TRUE) {
12061 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12062 debug_params->bestats_debug_params);
12063 }
12064 // BHIST
12065 if (debug_params->bhist_debug_params_valid == TRUE) {
12066 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12067 debug_params->bhist_debug_params);
12068 }
12069 // 3A Tuning
12070 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12071 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12072 debug_params->q3a_tuning_debug_params);
12073 }
12074 }
12075
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012076 // Add metadata which reprocess needs
12077 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12078 cam_reprocess_info_t *repro_info =
12079 (cam_reprocess_info_t *)frame_settings.find
12080 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070012081 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012082 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012083 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012084 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012085 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012086 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012087 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012088 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012089 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012090 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070012091 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012092 repro_info->pipeline_flip);
12093 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12094 repro_info->af_roi);
12095 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12096 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070012097 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
12098 CAM_INTF_PARM_ROTATION metadata then has been added in
12099 translateToHalMetadata. HAL need to keep this new rotation
12100 metadata. Otherwise, the old rotation info saved in the vendor tag
12101 would be used */
12102 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12103 CAM_INTF_PARM_ROTATION, reprocParam) {
12104 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12105 } else {
12106 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012107 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012108 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012109 }
12110
12111 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
12112 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
12113 roi.width and roi.height are the final JPEG size.
12114 For now, the HAL only checks this for reprocess requests. */
12115 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12116 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12117 uint8_t *enable =
12118 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12119 if (*enable == TRUE) {
12120 int32_t *crop_data =
12121 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12122 cam_stream_crop_info_t crop_meta;
12123 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12124 crop_meta.stream_id = 0;
12125 crop_meta.crop.left = crop_data[0];
12126 crop_meta.crop.top = crop_data[1];
12127 crop_meta.crop.width = crop_data[2];
12128 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012129 // The JPEG crop roi should match cpp output size
12130 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12131 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12132 crop_meta.roi_map.left = 0;
12133 crop_meta.roi_map.top = 0;
12134 crop_meta.roi_map.width = cpp_crop->crop.width;
12135 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012136 }
12137 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12138 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012139 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012140 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012141 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12142 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012143 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012144 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12145
12146 // Add JPEG scale information
12147 cam_dimension_t scale_dim;
12148 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12149 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12150 int32_t *roi =
12151 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12152 scale_dim.width = roi[2];
12153 scale_dim.height = roi[3];
12154 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12155 scale_dim);
12156 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12157 scale_dim.width, scale_dim.height, mCameraId);
12158 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012159 }
12160 }
12161
12162 return rc;
12163}
12164
12165/*===========================================================================
12166 * FUNCTION : saveRequestSettings
12167 *
12168 * DESCRIPTION: Add any settings that might have changed to the request settings
12169 * and save the settings to be applied on the frame
12170 *
12171 * PARAMETERS :
12172 * @jpegMetadata : the extracted and/or modified jpeg metadata
12173 * @request : request with initial settings
12174 *
12175 * RETURN :
12176 * camera_metadata_t* : pointer to the saved request settings
12177 *==========================================================================*/
12178camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12179 const CameraMetadata &jpegMetadata,
12180 camera3_capture_request_t *request)
12181{
12182 camera_metadata_t *resultMetadata;
12183 CameraMetadata camMetadata;
12184 camMetadata = request->settings;
12185
12186 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12187 int32_t thumbnail_size[2];
12188 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12189 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12190 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12191 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12192 }
12193
12194 if (request->input_buffer != NULL) {
12195 uint8_t reprocessFlags = 1;
12196 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12197 (uint8_t*)&reprocessFlags,
12198 sizeof(reprocessFlags));
12199 }
12200
12201 resultMetadata = camMetadata.release();
12202 return resultMetadata;
12203}
12204
12205/*===========================================================================
12206 * FUNCTION : setHalFpsRange
12207 *
12208 * DESCRIPTION: set FPS range parameter
12209 *
12210 *
12211 * PARAMETERS :
12212 * @settings : Metadata from framework
12213 * @hal_metadata: Metadata buffer
12214 *
12215 *
12216 * RETURN : success: NO_ERROR
12217 * failure:
12218 *==========================================================================*/
12219int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12220 metadata_buffer_t *hal_metadata)
12221{
12222 int32_t rc = NO_ERROR;
12223 cam_fps_range_t fps_range;
12224 fps_range.min_fps = (float)
12225 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12226 fps_range.max_fps = (float)
12227 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12228 fps_range.video_min_fps = fps_range.min_fps;
12229 fps_range.video_max_fps = fps_range.max_fps;
12230
12231 LOGD("aeTargetFpsRange fps: [%f %f]",
12232 fps_range.min_fps, fps_range.max_fps);
12233 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12234 * follows:
12235 * ---------------------------------------------------------------|
12236 * Video stream is absent in configure_streams |
12237 * (Camcorder preview before the first video record |
12238 * ---------------------------------------------------------------|
12239 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12240 * | | | vid_min/max_fps|
12241 * ---------------------------------------------------------------|
12242 * NO | [ 30, 240] | 240 | [240, 240] |
12243 * |-------------|-------------|----------------|
12244 * | [240, 240] | 240 | [240, 240] |
12245 * ---------------------------------------------------------------|
12246 * Video stream is present in configure_streams |
12247 * ---------------------------------------------------------------|
12248 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12249 * | | | vid_min/max_fps|
12250 * ---------------------------------------------------------------|
12251 * NO | [ 30, 240] | 240 | [240, 240] |
12252 * (camcorder prev |-------------|-------------|----------------|
12253 * after video rec | [240, 240] | 240 | [240, 240] |
12254 * is stopped) | | | |
12255 * ---------------------------------------------------------------|
12256 * YES | [ 30, 240] | 240 | [240, 240] |
12257 * |-------------|-------------|----------------|
12258 * | [240, 240] | 240 | [240, 240] |
12259 * ---------------------------------------------------------------|
12260 * When Video stream is absent in configure_streams,
12261 * preview fps = sensor_fps / batchsize
12262 * Eg: for 240fps at batchSize 4, preview = 60fps
12263 * for 120fps at batchSize 4, preview = 30fps
12264 *
12265 * When video stream is present in configure_streams, preview fps is as per
12266 * the ratio of preview buffers to video buffers requested in process
12267 * capture request
12268 */
12269 mBatchSize = 0;
12270 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12271 fps_range.min_fps = fps_range.video_max_fps;
12272 fps_range.video_min_fps = fps_range.video_max_fps;
12273 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12274 fps_range.max_fps);
12275 if (NAME_NOT_FOUND != val) {
12276 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12278 return BAD_VALUE;
12279 }
12280
12281 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12282 /* If batchmode is currently in progress and the fps changes,
12283 * set the flag to restart the sensor */
12284 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12285 (mHFRVideoFps != fps_range.max_fps)) {
12286 mNeedSensorRestart = true;
12287 }
12288 mHFRVideoFps = fps_range.max_fps;
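// Derive the batch size from the HFR rate (HFR fps / PREVIEW_FPS_FOR_HFR),
// capped at MAX_HFR_BATCH_SIZE.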
12289 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12290 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12291 mBatchSize = MAX_HFR_BATCH_SIZE;
12292 }
12293 }
12294 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12295
12296 }
12297 } else {
12298 /* HFR mode is a session parameter in the backend/ISP. It should be reset
12299 * when not in HFR mode */
12300 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12302 return BAD_VALUE;
12303 }
12304 }
12305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12306 return BAD_VALUE;
12307 }
12308 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12309 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12310 return rc;
12311}
12312
12313/*===========================================================================
12314 * FUNCTION : translateToHalMetadata
12315 *
12316 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12317 *
12318 *
12319 * PARAMETERS :
12320 * @request : request sent from framework
12321 *
12322 *
12323 * RETURN : success: NO_ERROR
12324 * failure:
12325 *==========================================================================*/
12326int QCamera3HardwareInterface::translateToHalMetadata
12327 (const camera3_capture_request_t *request,
12328 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012329 uint32_t snapshotStreamId) {
12330 if (request == nullptr || hal_metadata == nullptr) {
12331 return BAD_VALUE;
12332 }
12333
12334 int64_t minFrameDuration = getMinFrameDuration(request);
12335
12336 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12337 minFrameDuration);
12338}
12339
12340int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12341 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12342 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12343
Thierry Strudel3d639192016-09-09 11:52:26 -070012344 int rc = 0;
12345 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012346 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012347
12348 /* Do not change the order of the following list unless you know what you are
12349 * doing.
12350 * The order is laid out in such a way that parameters in the front of the table
12351 * may be used to override the parameters later in the table. Examples are:
12352 * 1. META_MODE should precede AEC/AWB/AF MODE
12353 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12354 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12355 * 4. Any mode should precede its corresponding settings
12356 */
12357 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12358 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12360 rc = BAD_VALUE;
12361 }
12362 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12363 if (rc != NO_ERROR) {
12364 LOGE("extractSceneMode failed");
12365 }
12366 }
12367
12368 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12369 uint8_t fwk_aeMode =
12370 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12371 uint8_t aeMode;
12372 int32_t redeye;
12373
12374 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12375 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012376 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12377 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012378 } else {
12379 aeMode = CAM_AE_MODE_ON;
12380 }
12381 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12382 redeye = 1;
12383 } else {
12384 redeye = 0;
12385 }
12386
12387 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12388 fwk_aeMode);
12389 if (NAME_NOT_FOUND != val) {
12390 int32_t flashMode = (int32_t)val;
12391 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12392 }
12393
12394 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12396 rc = BAD_VALUE;
12397 }
12398 }
12399
12400 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12401 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12402 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12403 fwk_whiteLevel);
12404 if (NAME_NOT_FOUND != val) {
12405 uint8_t whiteLevel = (uint8_t)val;
12406 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12407 rc = BAD_VALUE;
12408 }
12409 }
12410 }
12411
12412 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12413 uint8_t fwk_cacMode =
12414 frame_settings.find(
12415 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12416 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12417 fwk_cacMode);
12418 if (NAME_NOT_FOUND != val) {
12419 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12420 bool entryAvailable = FALSE;
12421 // Check whether Frameworks set CAC mode is supported in device or not
12422 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12423 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12424 entryAvailable = TRUE;
12425 break;
12426 }
12427 }
12428 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12429 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.:
12430 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH_QUALITY doing the same as FAST in the ISP
12431 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12432 if (entryAvailable == FALSE) {
12433 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12434 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12435 } else {
12436 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12437 // HIGH_QUALITY is not supported, so fall back to FAST; the spec says the
12438 // underlying device implementation may be the same for both modes.
12439 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12440 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12441 // FAST is not supported, so neither HIGH_QUALITY nor FAST can be set; choose OFF
12442 // to avoid the fps drop that high quality would cause
12443 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12444 } else {
12445 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12446 }
12447 }
12448 }
12449 LOGD("Final cacMode is %d", cacMode);
12450 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12451 rc = BAD_VALUE;
12452 }
12453 } else {
12454 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12455 }
12456 }
12457
Jason Lee84ae9972017-02-24 13:24:24 -080012458 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012459 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012460 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012461 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012462 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12463 fwk_focusMode);
12464 if (NAME_NOT_FOUND != val) {
12465 uint8_t focusMode = (uint8_t)val;
12466 LOGD("set focus mode %d", focusMode);
12467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12468 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12469 rc = BAD_VALUE;
12470 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012471 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -070012472 } else {
12473 LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
Thierry Strudel3d639192016-09-09 11:52:26 -070012474 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012475 } else {
12476 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12477 LOGE("Focus forced to infinity %d", focusMode);
12478 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12479 rc = BAD_VALUE;
12480 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012481 }
12482
Jason Lee84ae9972017-02-24 13:24:24 -080012483 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12484 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012485 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12487 focalDistance)) {
12488 rc = BAD_VALUE;
12489 }
12490 }
12491
12492 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12493 uint8_t fwk_antibandingMode =
12494 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12495 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12496 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12497 if (NAME_NOT_FOUND != val) {
12498 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012499 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
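// Resolve AUTO antibanding by mains-frequency zone: 60Hz zones map to AUTO_60HZ,
// everything else to AUTO_50HZ.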
12500 if (m60HzZone) {
12501 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12502 } else {
12503 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12504 }
12505 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12507 hal_antibandingMode)) {
12508 rc = BAD_VALUE;
12509 }
12510 }
12511 }
12512
12513 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12514 int32_t expCompensation = frame_settings.find(
12515 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12516 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12517 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12518 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12519 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012520 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12522 expCompensation)) {
12523 rc = BAD_VALUE;
12524 }
12525 }
12526
12527 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12528 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12530 rc = BAD_VALUE;
12531 }
12532 }
12533 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12534 rc = setHalFpsRange(frame_settings, hal_metadata);
12535 if (rc != NO_ERROR) {
12536 LOGE("setHalFpsRange failed");
12537 }
12538 }
12539
12540 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12541 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12543 rc = BAD_VALUE;
12544 }
12545 }
12546
12547 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12548 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12549 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12550 fwk_effectMode);
12551 if (NAME_NOT_FOUND != val) {
12552 uint8_t effectMode = (uint8_t)val;
12553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12554 rc = BAD_VALUE;
12555 }
12556 }
12557 }
12558
12559 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12560 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12561 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12562 colorCorrectMode)) {
12563 rc = BAD_VALUE;
12564 }
12565 }
12566
12567 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12568 cam_color_correct_gains_t colorCorrectGains;
12569 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12570 colorCorrectGains.gains[i] =
12571 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12572 }
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12574 colorCorrectGains)) {
12575 rc = BAD_VALUE;
12576 }
12577 }
12578
12579 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12580 cam_color_correct_matrix_t colorCorrectTransform;
12581 cam_rational_type_t transform_elem;
12582 size_t num = 0;
12583 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12584 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12585 transform_elem.numerator =
12586 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12587 transform_elem.denominator =
12588 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12589 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12590 num++;
12591 }
12592 }
12593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12594 colorCorrectTransform)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598
12599 cam_trigger_t aecTrigger;
12600 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12601 aecTrigger.trigger_id = -1;
12602 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12603 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12604 aecTrigger.trigger =
12605 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12606 aecTrigger.trigger_id =
12607 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12609 aecTrigger)) {
12610 rc = BAD_VALUE;
12611 }
12612 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12613 aecTrigger.trigger, aecTrigger.trigger_id);
12614 }
12615
12616 /*af_trigger must come with a trigger id*/
12617 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12618 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12619 cam_trigger_t af_trigger;
12620 af_trigger.trigger =
12621 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12622 af_trigger.trigger_id =
12623 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12624 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12625 rc = BAD_VALUE;
12626 }
12627 LOGD("AfTrigger: %d AfTriggerID: %d",
12628 af_trigger.trigger, af_trigger.trigger_id);
12629 }
12630
12631 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12632 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12634 rc = BAD_VALUE;
12635 }
12636 }
12637 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12638 cam_edge_application_t edge_application;
12639 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012640
Thierry Strudel3d639192016-09-09 11:52:26 -070012641 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12642 edge_application.sharpness = 0;
12643 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012644 edge_application.sharpness =
12645 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12646 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12647 int32_t sharpness =
12648 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12649 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12650 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12651 LOGD("Setting edge mode sharpness %d", sharpness);
12652 edge_application.sharpness = sharpness;
12653 }
12654 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012655 }
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12657 rc = BAD_VALUE;
12658 }
12659 }
12660
12661 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12662 int32_t respectFlashMode = 1;
12663 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12664 uint8_t fwk_aeMode =
12665 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012666 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12667 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12668 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012669 respectFlashMode = 0;
12670 LOGH("AE Mode controls flash, ignore android.flash.mode");
12671 }
12672 }
12673 if (respectFlashMode) {
12674 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12675 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12676 LOGH("flash mode after mapping %d", val);
12677 // To check: CAM_INTF_META_FLASH_MODE usage
12678 if (NAME_NOT_FOUND != val) {
12679 uint8_t flashMode = (uint8_t)val;
12680 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684 }
12685 }
12686
12687 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12688 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12690 rc = BAD_VALUE;
12691 }
12692 }
12693
12694 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12695 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12697 flashFiringTime)) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701
12702 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12703 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12705 hotPixelMode)) {
12706 rc = BAD_VALUE;
12707 }
12708 }
12709
12710 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12711 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12713 lensAperture)) {
12714 rc = BAD_VALUE;
12715 }
12716 }
12717
12718 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12719 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12721 filterDensity)) {
12722 rc = BAD_VALUE;
12723 }
12724 }
12725
12726 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12727 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12729 focalLength)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
12733
12734 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12735 uint8_t optStabMode =
12736 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12738 optStabMode)) {
12739 rc = BAD_VALUE;
12740 }
12741 }
12742
12743 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12744 uint8_t videoStabMode =
12745 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12746 LOGD("videoStabMode from APP = %d", videoStabMode);
12747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12748 videoStabMode)) {
12749 rc = BAD_VALUE;
12750 }
12751 }
12752
12753
12754 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12755 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12757 noiseRedMode)) {
12758 rc = BAD_VALUE;
12759 }
12760 }
12761
12762 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12763 float reprocessEffectiveExposureFactor =
12764 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12766 reprocessEffectiveExposureFactor)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770
12771 cam_crop_region_t scalerCropRegion;
12772 bool scalerCropSet = false;
12773 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12774 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12775 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12776 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12777 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12778
12779 // Map coordinate system from active array to sensor output.
12780 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12781 scalerCropRegion.width, scalerCropRegion.height);
12782
12783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12784 scalerCropRegion)) {
12785 rc = BAD_VALUE;
12786 }
12787 scalerCropSet = true;
12788 }
12789
12790 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12791 int64_t sensorExpTime =
12792 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12793 LOGD("setting sensorExpTime %lld", sensorExpTime);
12794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12795 sensorExpTime)) {
12796 rc = BAD_VALUE;
12797 }
12798 }
12799
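    // Clamp the requested frame duration to the valid range: no shorter than the
    // minimum duration of the configured streams and no longer than the sensor maximum.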
12800 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12801 int64_t sensorFrameDuration =
12802 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012803 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12804 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12805 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12806 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12807 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12808 sensorFrameDuration)) {
12809 rc = BAD_VALUE;
12810 }
12811 }
12812
12813 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12814 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12815 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12816 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12817 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12818 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12819 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12821 sensorSensitivity)) {
12822 rc = BAD_VALUE;
12823 }
12824 }
12825
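    // Post-RAW sensitivity boost (ISP digital gain) handling is compiled out for
    // HAL 3.3 builds, where this tag is not used.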
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012826#ifndef USE_HAL_3_3
12827 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12828 int32_t ispSensitivity =
12829 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12830 if (ispSensitivity <
12831 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12832 ispSensitivity =
12833 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12834 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12835 }
12836 if (ispSensitivity >
12837 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12838 ispSensitivity =
12839 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12840 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12841 }
12842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12843 ispSensitivity)) {
12844 rc = BAD_VALUE;
12845 }
12846 }
12847#endif
12848
Thierry Strudel3d639192016-09-09 11:52:26 -070012849 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12850 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12852 rc = BAD_VALUE;
12853 }
12854 }
12855
12856 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12857 uint8_t fwk_facedetectMode =
12858 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12859
12860 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12861 fwk_facedetectMode);
12862
12863 if (NAME_NOT_FOUND != val) {
12864 uint8_t facedetectMode = (uint8_t)val;
12865 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12866 facedetectMode)) {
12867 rc = BAD_VALUE;
12868 }
12869 }
12870 }
12871
Thierry Strudel54dc9782017-02-15 12:12:10 -080012872 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012873 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012874 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12876 histogramMode)) {
12877 rc = BAD_VALUE;
12878 }
12879 }
12880
12881 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12882 uint8_t sharpnessMapMode =
12883 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12885 sharpnessMapMode)) {
12886 rc = BAD_VALUE;
12887 }
12888 }
12889
12890 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12891 uint8_t tonemapMode =
12892 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12893 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12894 rc = BAD_VALUE;
12895 }
12896 }
12897 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12898 /*All tonemap channels will have the same number of points*/
12899 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12900 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12901 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12902 cam_rgb_tonemap_curves tonemapCurves;
12903 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12904 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12905 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12906 tonemapCurves.tonemap_points_cnt,
12907 CAM_MAX_TONEMAP_CURVE_SIZE);
12908 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12909 }
12910
12911 /* ch0 = G*/
12912 size_t point = 0;
12913 cam_tonemap_curve_t tonemapCurveGreen;
12914 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12915 for (size_t j = 0; j < 2; j++) {
12916 tonemapCurveGreen.tonemap_points[i][j] =
12917 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12918 point++;
12919 }
12920 }
12921 tonemapCurves.curves[0] = tonemapCurveGreen;
12922
12923 /* ch 1 = B */
12924 point = 0;
12925 cam_tonemap_curve_t tonemapCurveBlue;
12926 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12927 for (size_t j = 0; j < 2; j++) {
12928 tonemapCurveBlue.tonemap_points[i][j] =
12929 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12930 point++;
12931 }
12932 }
12933 tonemapCurves.curves[1] = tonemapCurveBlue;
12934
12935 /* ch 2 = R */
12936 point = 0;
12937 cam_tonemap_curve_t tonemapCurveRed;
12938 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12939 for (size_t j = 0; j < 2; j++) {
12940 tonemapCurveRed.tonemap_points[i][j] =
12941 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12942 point++;
12943 }
12944 }
12945 tonemapCurves.curves[2] = tonemapCurveRed;
12946
12947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12948 tonemapCurves)) {
12949 rc = BAD_VALUE;
12950 }
12951 }
12952
12953 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12954 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12955 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12956 captureIntent)) {
12957 rc = BAD_VALUE;
12958 }
12959 }
12960
12961 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12962 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12964 blackLevelLock)) {
12965 rc = BAD_VALUE;
12966 }
12967 }
12968
12969 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12970 uint8_t lensShadingMapMode =
12971 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12973 lensShadingMapMode)) {
12974 rc = BAD_VALUE;
12975 }
12976 }
12977
12978 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12979 cam_area_t roi;
12980 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012981 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012982
12983 // Map coordinate system from active array to sensor output.
12984 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12985 roi.rect.height);
12986
12987 if (scalerCropSet) {
12988 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12989 }
12990 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12991 rc = BAD_VALUE;
12992 }
12993 }
12994
12995 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12996 cam_area_t roi;
12997 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012998 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012999
13000 // Map coordinate system from active array to sensor output.
13001 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
13002 roi.rect.height);
13003
13004 if (scalerCropSet) {
13005 reset = resetIfNeededROI(&roi, &scalerCropRegion);
13006 }
13007 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
13008 rc = BAD_VALUE;
13009 }
13010 }
13011
13012 // CDS for non-HFR non-video mode
13013 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
13014 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
13015 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
13016 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
13017 LOGE("Invalid CDS mode %d!", *fwk_cds);
13018 } else {
13019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13020 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
13021 rc = BAD_VALUE;
13022 }
13023 }
13024 }
13025
Thierry Strudel04e026f2016-10-10 11:27:36 -070013026 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080013027 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013028 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080013029 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
13030 }
13031 if (m_bVideoHdrEnabled)
13032 vhdr = CAM_VIDEO_HDR_MODE_ON;
13033
Thierry Strudel54dc9782017-02-15 12:12:10 -080013034 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
13035
13036 if(vhdr != curr_hdr_state)
13037 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
13038
Mansoor Aftab93a66e52017-01-26 14:58:25 -080013039 rc = setVideoHdrMode(mParameters, vhdr);
13040 if (rc != NO_ERROR) {
13041 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013042 }
13043
13044 //IR
13045 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
13046 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
13047 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080013048 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
13049 uint8_t isIRon = 0;
13050
13051 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013052 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
13053 LOGE("Invalid IR mode %d!", fwk_ir);
13054 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080013055 if(isIRon != curr_ir_state )
13056 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
13057
Thierry Strudel04e026f2016-10-10 11:27:36 -070013058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13059 CAM_INTF_META_IR_MODE, fwk_ir)) {
13060 rc = BAD_VALUE;
13061 }
13062 }
13063 }
13064
Thierry Strudel54dc9782017-02-15 12:12:10 -080013065 //Binning Correction Mode
13066 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
13067 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
13068 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
13069 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
13070 || (0 > fwk_binning_correction)) {
13071 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
13072 } else {
13073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13074 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
13075 rc = BAD_VALUE;
13076 }
13077 }
13078 }
13079
Thierry Strudel269c81a2016-10-12 12:13:59 -070013080 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
13081 float aec_speed;
13082 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
13083 LOGD("AEC Speed :%f", aec_speed);
13084 if ( aec_speed < 0 ) {
13085 LOGE("Invalid AEC mode %f!", aec_speed);
13086 } else {
13087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
13088 aec_speed)) {
13089 rc = BAD_VALUE;
13090 }
13091 }
13092 }
13093
13094 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
13095 float awb_speed;
13096 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
13097 LOGD("AWB Speed :%f", awb_speed);
13098 if ( awb_speed < 0 ) {
13099 LOGE("Invalid AWB mode %f!", awb_speed);
13100 } else {
13101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13102 awb_speed)) {
13103 rc = BAD_VALUE;
13104 }
13105 }
13106 }
13107
Thierry Strudel3d639192016-09-09 11:52:26 -070013108 // TNR
13109 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13110 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13111 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013112 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070013113 cam_denoise_param_t tnr;
13114 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13115 tnr.process_plates =
13116 (cam_denoise_process_type_t)frame_settings.find(
13117 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13118 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013119
13120 if(b_TnrRequested != curr_tnr_state)
13121 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13122
Thierry Strudel3d639192016-09-09 11:52:26 -070013123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13124 rc = BAD_VALUE;
13125 }
13126 }
13127
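    // Exposure metering mode (vendor tag) maps directly to the AEC algorithm type.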
Thierry Strudel54dc9782017-02-15 12:12:10 -080013128 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013129 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013130 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013131 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13132 *exposure_metering_mode)) {
13133 rc = BAD_VALUE;
13134 }
13135 }
13136
Thierry Strudel3d639192016-09-09 11:52:26 -070013137 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13138 int32_t fwk_testPatternMode =
13139 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13140 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13141 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13142
13143 if (NAME_NOT_FOUND != testPatternMode) {
13144 cam_test_pattern_data_t testPatternData;
13145 memset(&testPatternData, 0, sizeof(testPatternData));
13146 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13147 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13148 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13149 int32_t *fwk_testPatternData =
13150 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13151 testPatternData.r = fwk_testPatternData[0];
13152 testPatternData.b = fwk_testPatternData[3];
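                // The Gr/Gb assignment of the solid-color test pattern depends on the
                // sensor's Bayer color filter arrangement.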
13153 switch (gCamCapability[mCameraId]->color_arrangement) {
13154 case CAM_FILTER_ARRANGEMENT_RGGB:
13155 case CAM_FILTER_ARRANGEMENT_GRBG:
13156 testPatternData.gr = fwk_testPatternData[1];
13157 testPatternData.gb = fwk_testPatternData[2];
13158 break;
13159 case CAM_FILTER_ARRANGEMENT_GBRG:
13160 case CAM_FILTER_ARRANGEMENT_BGGR:
13161 testPatternData.gr = fwk_testPatternData[2];
13162 testPatternData.gb = fwk_testPatternData[1];
13163 break;
13164 default:
13165 LOGE("color arrangement %d is not supported",
13166 gCamCapability[mCameraId]->color_arrangement);
13167 break;
13168 }
13169 }
13170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13171 testPatternData)) {
13172 rc = BAD_VALUE;
13173 }
13174 } else {
13175 LOGE("Invalid framework sensor test pattern mode %d",
13176 fwk_testPatternMode);
13177 }
13178 }
13179
13180 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13181 size_t count = 0;
13182 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13183 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13184 gps_coords.data.d, gps_coords.count, count);
13185 if (gps_coords.count != count) {
13186 rc = BAD_VALUE;
13187 }
13188 }
13189
13190 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13191 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13192 size_t count = 0;
13193 const char *gps_methods_src = (const char *)
13194 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13195 memset(gps_methods, '\0', sizeof(gps_methods));
13196 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13197 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13198 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13199 if (GPS_PROCESSING_METHOD_SIZE != count) {
13200 rc = BAD_VALUE;
13201 }
13202 }
13203
13204 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13205 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13207 gps_timestamp)) {
13208 rc = BAD_VALUE;
13209 }
13210 }
13211
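    // Map the JPEG orientation (degrees) to the discrete HAL rotation enum and tag
    // it with the snapshot stream ID so rotation is applied to the capture stream.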
13212 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13213 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13214 cam_rotation_info_t rotation_info;
13215 if (orientation == 0) {
13216 rotation_info.rotation = ROTATE_0;
13217 } else if (orientation == 90) {
13218 rotation_info.rotation = ROTATE_90;
13219 } else if (orientation == 180) {
13220 rotation_info.rotation = ROTATE_180;
13221 } else if (orientation == 270) {
13222 rotation_info.rotation = ROTATE_270;
13223 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013224 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013225 rotation_info.streamId = snapshotStreamId;
13226 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13228 rc = BAD_VALUE;
13229 }
13230 }
13231
13232 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13233 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13235 rc = BAD_VALUE;
13236 }
13237 }
13238
13239 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13240 uint32_t thumb_quality = (uint32_t)
13241 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13243 thumb_quality)) {
13244 rc = BAD_VALUE;
13245 }
13246 }
13247
13248 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13249 cam_dimension_t dim;
13250 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13251 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13252 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13253 rc = BAD_VALUE;
13254 }
13255 }
13256
13257 // Internal metadata
13258 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13259 size_t count = 0;
13260 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13261 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13262 privatedata.data.i32, privatedata.count, count);
13263 if (privatedata.count != count) {
13264 rc = BAD_VALUE;
13265 }
13266 }
13267
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013268 // ISO/Exposure Priority
13269 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13270 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13271 cam_priority_mode_t mode =
13272 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13273 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13274 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13275 use_iso_exp_pty.previewOnly = FALSE;
13276 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13277 use_iso_exp_pty.value = *ptr;
13278
13279 if(CAM_ISO_PRIORITY == mode) {
13280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13281 use_iso_exp_pty)) {
13282 rc = BAD_VALUE;
13283 }
13284 }
13285 else {
13286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13287 use_iso_exp_pty)) {
13288 rc = BAD_VALUE;
13289 }
13290 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013291
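            // Manual ISO/exposure priority relies on ZSL mode; enable it here and
            // disable it below when priority mode is not requested.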
13292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13293 rc = BAD_VALUE;
13294 }
13295 }
13296 } else {
13297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13298 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013299 }
13300 }
13301
13302 // Saturation
13303 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13304 int32_t* use_saturation =
13305 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13307 rc = BAD_VALUE;
13308 }
13309 }
13310
Thierry Strudel3d639192016-09-09 11:52:26 -070013311 // EV step
13312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13313 gCamCapability[mCameraId]->exp_compensation_step)) {
13314 rc = BAD_VALUE;
13315 }
13316
13317 // CDS info
13318 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13319 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13320 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13321
13322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13323 CAM_INTF_META_CDS_DATA, *cdsData)) {
13324 rc = BAD_VALUE;
13325 }
13326 }
13327
Shuzhen Wang19463d72016-03-08 11:09:52 -080013328 // Hybrid AE
13329 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13330 uint8_t *hybrid_ae = (uint8_t *)
13331 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13333 rc = BAD_VALUE;
13334 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013335 }
13336
Shuzhen Wang14415f52016-11-16 18:26:18 -080013337 // Histogram
13338 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13339 uint8_t histogramMode =
13340 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13342 histogramMode)) {
13343 rc = BAD_VALUE;
13344 }
13345 }
13346
13347 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13348 int32_t histogramBins =
13349 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13351 histogramBins)) {
13352 rc = BAD_VALUE;
13353 }
13354 }
13355
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013356 // Tracking AF
13357 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13358 uint8_t trackingAfTrigger =
13359 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13361 trackingAfTrigger)) {
13362 rc = BAD_VALUE;
13363 }
13364 }
13365
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013366 // Makernote
13367 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13368 if (entry.count != 0) {
13369 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13370 cam_makernote_t makernote;
13371 makernote.length = entry.count;
13372 memcpy(makernote.data, entry.data.u8, makernote.length);
13373 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13374 rc = BAD_VALUE;
13375 }
13376 } else {
13377 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13378 MAX_MAKERNOTE_LENGTH);
13379 rc = BAD_VALUE;
13380 }
13381 }
13382
Thierry Strudel3d639192016-09-09 11:52:26 -070013383 return rc;
13384}
13385
13386/*===========================================================================
13387 * FUNCTION : captureResultCb
13388 *
13389 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13390 *
13391 * PARAMETERS :
13392 * @frame : frame information from mm-camera-interface
13393 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13394 * @userdata: userdata
13395 *
13396 * RETURN : NONE
13397 *==========================================================================*/
13398void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13399 camera3_stream_buffer_t *buffer,
13400 uint32_t frame_number, bool isInputBuffer, void *userdata)
13401{
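    // Static trampoline: recover the HAL instance from userdata and forward the result.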
13402 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13403 if (hw == NULL) {
13404 LOGE("Invalid hw %p", hw);
13405 return;
13406 }
13407
13408 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13409 return;
13410}
13411
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013412/*===========================================================================
13413 * FUNCTION : setBufferErrorStatus
13414 *
13415 * DESCRIPTION: Callback handler for channels to report any buffer errors
13416 *
13417 * PARAMETERS :
13418 * @ch : Channel on which buffer error is reported from
13419 * @frame_number : frame number on which buffer error is reported on
13420 * @buffer_status : buffer error status
13421 * @userdata: userdata
13422 *
13423 * RETURN : NONE
13424 *==========================================================================*/
13425void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13426 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13427{
13428 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13429 if (hw == NULL) {
13430 LOGE("Invalid hw %p", hw);
13431 return;
13432 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013433
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013434 hw->setBufferErrorStatus(ch, frame_number, err);
13435 return;
13436}
13437
13438void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13439 uint32_t frameNumber, camera3_buffer_status_t err)
13440{
13441 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13442 pthread_mutex_lock(&mMutex);
13443
13444 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13445 if (req.frame_number != frameNumber)
13446 continue;
13447 for (auto& k : req.mPendingBufferList) {
13448 if(k.stream->priv == ch) {
13449 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13450 }
13451 }
13452 }
13453
13454 pthread_mutex_unlock(&mMutex);
13455 return;
13456}
Thierry Strudel3d639192016-09-09 11:52:26 -070013457/*===========================================================================
13458 * FUNCTION : initialize
13459 *
13460 * DESCRIPTION: Pass framework callback pointers to HAL
13461 *
13462 * PARAMETERS :
13463 *
13464 *
13465 * RETURN : Success : 0
13466 * Failure: -ENODEV
13467 *==========================================================================*/
13468
13469int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13470 const camera3_callback_ops_t *callback_ops)
13471{
13472 LOGD("E");
13473 QCamera3HardwareInterface *hw =
13474 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13475 if (!hw) {
13476 LOGE("NULL camera device");
13477 return -ENODEV;
13478 }
13479
13480 int rc = hw->initialize(callback_ops);
13481 LOGD("X");
13482 return rc;
13483}
13484
13485/*===========================================================================
13486 * FUNCTION : configure_streams
13487 *
13488 * DESCRIPTION:
13489 *
13490 * PARAMETERS :
13491 *
13492 *
13493 * RETURN : Success: 0
13494 * Failure: -EINVAL (if stream configuration is invalid)
13495 * -ENODEV (fatal error)
13496 *==========================================================================*/
13497
13498int QCamera3HardwareInterface::configure_streams(
13499 const struct camera3_device *device,
13500 camera3_stream_configuration_t *stream_list)
13501{
13502 LOGD("E");
13503 QCamera3HardwareInterface *hw =
13504 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13505 if (!hw) {
13506 LOGE("NULL camera device");
13507 return -ENODEV;
13508 }
13509 int rc = hw->configureStreams(stream_list);
13510 LOGD("X");
13511 return rc;
13512}
13513
13514/*===========================================================================
13515 * FUNCTION : construct_default_request_settings
13516 *
13517 * DESCRIPTION: Configure a settings buffer to meet the required use case
13518 *
13519 * PARAMETERS :
13520 *
13521 *
13522 * RETURN : Success: Return valid metadata
13523 * Failure: Return NULL
13524 *==========================================================================*/
13525const camera_metadata_t* QCamera3HardwareInterface::
13526 construct_default_request_settings(const struct camera3_device *device,
13527 int type)
13528{
13529
13530 LOGD("E");
13531 camera_metadata_t* fwk_metadata = NULL;
13532 QCamera3HardwareInterface *hw =
13533 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13534 if (!hw) {
13535 LOGE("NULL camera device");
13536 return NULL;
13537 }
13538
13539 fwk_metadata = hw->translateCapabilityToMetadata(type);
13540
13541 LOGD("X");
13542 return fwk_metadata;
13543}
13544
13545/*===========================================================================
13546 * FUNCTION : process_capture_request
13547 *
13548 * DESCRIPTION:
13549 *
13550 * PARAMETERS :
13551 *
13552 *
13553 * RETURN :
13554 *==========================================================================*/
13555int QCamera3HardwareInterface::process_capture_request(
13556 const struct camera3_device *device,
13557 camera3_capture_request_t *request)
13558{
13559 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013560 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013561 QCamera3HardwareInterface *hw =
13562 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13563 if (!hw) {
13564 LOGE("NULL camera device");
13565 return -EINVAL;
13566 }
13567
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013568 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013569 LOGD("X");
13570 return rc;
13571}
13572
13573/*===========================================================================
13574 * FUNCTION : dump
13575 *
13576 * DESCRIPTION:
13577 *
13578 * PARAMETERS :
13579 *
13580 *
13581 * RETURN :
13582 *==========================================================================*/
13583
13584void QCamera3HardwareInterface::dump(
13585 const struct camera3_device *device, int fd)
13586{
13587 /* Log level property is read when "adb shell dumpsys media.camera" is
13588 called so that the log level can be controlled without restarting
13589 the media server */
13590 getLogLevel();
13591
13592 LOGD("E");
13593 QCamera3HardwareInterface *hw =
13594 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13595 if (!hw) {
13596 LOGE("NULL camera device");
13597 return;
13598 }
13599
13600 hw->dump(fd);
13601 LOGD("X");
13602 return;
13603}
13604
13605/*===========================================================================
13606 * FUNCTION : flush
13607 *
13608 * DESCRIPTION:
13609 *
13610 * PARAMETERS :
13611 *
13612 *
13613 * RETURN :
13614 *==========================================================================*/
13615
13616int QCamera3HardwareInterface::flush(
13617 const struct camera3_device *device)
13618{
13619 int rc;
13620 LOGD("E");
13621 QCamera3HardwareInterface *hw =
13622 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13623 if (!hw) {
13624 LOGE("NULL camera device");
13625 return -EINVAL;
13626 }
13627
13628 pthread_mutex_lock(&hw->mMutex);
13629 // Validate current state
13630 switch (hw->mState) {
13631 case STARTED:
13632 /* valid state */
13633 break;
13634
13635 case ERROR:
13636 pthread_mutex_unlock(&hw->mMutex);
13637 hw->handleCameraDeviceError();
13638 return -ENODEV;
13639
13640 default:
13641 LOGI("Flush returned during state %d", hw->mState);
13642 pthread_mutex_unlock(&hw->mMutex);
13643 return 0;
13644 }
13645 pthread_mutex_unlock(&hw->mMutex);
13646
13647 rc = hw->flush(true /* restart channels */ );
13648 LOGD("X");
13649 return rc;
13650}
13651
13652/*===========================================================================
13653 * FUNCTION : close_camera_device
13654 *
13655 * DESCRIPTION:
13656 *
13657 * PARAMETERS :
13658 *
13659 *
13660 * RETURN :
13661 *==========================================================================*/
13662int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13663{
13664 int ret = NO_ERROR;
13665 QCamera3HardwareInterface *hw =
13666 reinterpret_cast<QCamera3HardwareInterface *>(
13667 reinterpret_cast<camera3_device_t *>(device)->priv);
13668 if (!hw) {
13669 LOGE("NULL camera device");
13670 return BAD_VALUE;
13671 }
13672
13673 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13674 delete hw;
13675 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013676 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013677 return ret;
13678}
13679
13680/*===========================================================================
13681 * FUNCTION : getWaveletDenoiseProcessPlate
13682 *
13683 * DESCRIPTION: query wavelet denoise process plate
13684 *
13685 * PARAMETERS : None
13686 *
13687 * RETURN : WNR process plate value
13688 *==========================================================================*/
13689cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13690{
13691 char prop[PROPERTY_VALUE_MAX];
13692 memset(prop, 0, sizeof(prop));
13693 property_get("persist.denoise.process.plates", prop, "0");
13694 int processPlate = atoi(prop);
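    // Map persist.denoise.process.plates to a WNR process plate; unknown values
    // fall back to streamlined YCbCr.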
13695 switch(processPlate) {
13696 case 0:
13697 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13698 case 1:
13699 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13700 case 2:
13701 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13702 case 3:
13703 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13704 default:
13705 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13706 }
13707}
13708
13709
13710/*===========================================================================
13711 * FUNCTION : getTemporalDenoiseProcessPlate
13712 *
13713 * DESCRIPTION: query temporal denoise process plate
13714 *
13715 * PARAMETERS : None
13716 *
13717 * RETURN : TNR process plate value
13718 *==========================================================================*/
13719cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13720{
13721 char prop[PROPERTY_VALUE_MAX];
13722 memset(prop, 0, sizeof(prop));
13723 property_get("persist.tnr.process.plates", prop, "0");
13724 int processPlate = atoi(prop);
13725 switch(processPlate) {
13726 case 0:
13727 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13728 case 1:
13729 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13730 case 2:
13731 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13732 case 3:
13733 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13734 default:
13735 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13736 }
13737}
13738
13739
13740/*===========================================================================
13741 * FUNCTION : extractSceneMode
13742 *
13743 * DESCRIPTION: Extract scene mode from frameworks set metadata
13744 *
13745 * PARAMETERS :
13746 * @frame_settings: CameraMetadata reference
13747 * @metaMode: ANDROID_CONTROL_MODE
13748 * @hal_metadata: hal metadata structure
13749 *
13750 * RETURN : int32_t type of status (NO_ERROR on success)
13751 *==========================================================================*/
13752int32_t QCamera3HardwareInterface::extractSceneMode(
13753 const CameraMetadata &frame_settings, uint8_t metaMode,
13754 metadata_buffer_t *hal_metadata)
13755{
13756 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013757 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13758
13759 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13760 LOGD("Ignoring control mode OFF_KEEP_STATE");
13761 return NO_ERROR;
13762 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013763
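    // Scene mode is only honored when the control mode is USE_SCENE_MODE; map the
    // framework scene mode to the corresponding HAL bestshot mode.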
13764 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13765 camera_metadata_ro_entry entry =
13766 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13767 if (0 == entry.count)
13768 return rc;
13769
13770 uint8_t fwk_sceneMode = entry.data.u8[0];
13771
13772 int val = lookupHalName(SCENE_MODES_MAP,
13773 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13774 fwk_sceneMode);
13775 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013776 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013777 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013778 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013779 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013780
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013781 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13782 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13783 }
13784
13785 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13786 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013787 cam_hdr_param_t hdr_params;
13788 hdr_params.hdr_enable = 1;
13789 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13790 hdr_params.hdr_need_1x = false;
13791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13792 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13793 rc = BAD_VALUE;
13794 }
13795 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013796
Thierry Strudel3d639192016-09-09 11:52:26 -070013797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13798 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13799 rc = BAD_VALUE;
13800 }
13801 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013802
13803 if (mForceHdrSnapshot) {
13804 cam_hdr_param_t hdr_params;
13805 hdr_params.hdr_enable = 1;
13806 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13807 hdr_params.hdr_need_1x = false;
13808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13809 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13810 rc = BAD_VALUE;
13811 }
13812 }
13813
Thierry Strudel3d639192016-09-09 11:52:26 -070013814 return rc;
13815}
13816
13817/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013818 * FUNCTION : setVideoHdrMode
13819 *
13820 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13821 *
13822 * PARAMETERS :
13823 * @hal_metadata: hal metadata structure
13824 * @vhdr : video HDR mode (QCAMERA3_VIDEO_HDR_MODE value)
13825 *
13826 * RETURN : int32_t type of status (NO_ERROR on success)
13827 *==========================================================================*/
13828int32_t QCamera3HardwareInterface::setVideoHdrMode(
13829 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13830{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013831 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13832 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13833 }
13834
13835 LOGE("Invalid Video HDR mode %d!", vhdr);
13836 return BAD_VALUE;
13837}
13838
13839/*===========================================================================
13840 * FUNCTION : setSensorHDR
13841 *
13842 * DESCRIPTION: Enable/disable sensor HDR.
13843 *
13844 * PARAMETERS :
13845 * @hal_metadata: hal metadata structure
13846 * @enable: boolean whether to enable/disable sensor HDR
13847 *
13848 * RETURN : int32_t type of status (NO_ERROR on success)
13849 *==========================================================================*/
13850int32_t QCamera3HardwareInterface::setSensorHDR(
13851 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13852{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013853 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013854 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13855
13856 if (enable) {
13857 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13858 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13859 #ifdef _LE_CAMERA_
13860 //Default to staggered HDR for IOT
13861 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13862 #else
13863 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13864 #endif
13865 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13866 }
13867
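    // Verify the selected sensor HDR type against the sensor's supported feature
    // mask before applying it.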
13868 bool isSupported = false;
13869 switch (sensor_hdr) {
13870 case CAM_SENSOR_HDR_IN_SENSOR:
13871 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13872 CAM_QCOM_FEATURE_SENSOR_HDR) {
13873 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013874 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013875 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013876 break;
13877 case CAM_SENSOR_HDR_ZIGZAG:
13878 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13879 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13880 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013881 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013882 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013883 break;
13884 case CAM_SENSOR_HDR_STAGGERED:
13885 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13886 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13887 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013888 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013889 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013890 break;
13891 case CAM_SENSOR_HDR_OFF:
13892 isSupported = true;
13893 LOGD("Turning off sensor HDR");
13894 break;
13895 default:
13896 LOGE("HDR mode %d not supported", sensor_hdr);
13897 rc = BAD_VALUE;
13898 break;
13899 }
13900
13901 if(isSupported) {
13902 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13903 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13904 rc = BAD_VALUE;
13905 } else {
13906 if(!isVideoHdrEnable)
13907 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013908 }
13909 }
13910 return rc;
13911}
13912
13913/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013914 * FUNCTION : needRotationReprocess
13915 *
13916 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13917 *
13918 * PARAMETERS : none
13919 *
13920 * RETURN : true: needed
13921 * false: no need
13922 *==========================================================================*/
13923bool QCamera3HardwareInterface::needRotationReprocess()
13924{
13925 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13926 // current rotation is not zero, and pp has the capability to process rotation
13927 LOGH("need do reprocess for rotation");
13928 return true;
13929 }
13930
13931 return false;
13932}
13933
13934/*===========================================================================
13935 * FUNCTION : needReprocess
13936 *
13937 * DESCRIPTION: if reprocess is needed
13938 *
13939 * PARAMETERS : none
13940 *
13941 * RETURN : true: needed
13942 * false: no need
13943 *==========================================================================*/
13944bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13945{
13946 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13947 // TODO: add for ZSL HDR later
13948 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13949 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13950 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13951 return true;
13952 } else {
13953 LOGH("already post processed frame");
13954 return false;
13955 }
13956 }
13957 return needRotationReprocess();
13958}
13959
13960/*===========================================================================
13961 * FUNCTION : needJpegExifRotation
13962 *
13963 * DESCRIPTION: if JPEG EXIF rotation is needed
13964 *
13965 * PARAMETERS : none
13966 *
13967 * RETURN : true: needed
13968 * false: no need
13969 *==========================================================================*/
13970bool QCamera3HardwareInterface::needJpegExifRotation()
13971{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013972 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013973 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13974 LOGD("Need use Jpeg EXIF Rotation");
13975 return true;
13976 }
13977 return false;
13978}
13979
13980/*===========================================================================
13981 * FUNCTION : addOfflineReprocChannel
13982 *
13983 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13984 * coming from input channel
13985 *
13986 * PARAMETERS :
13987 * @config : reprocess configuration
13988 * @inputChHandle : pointer to the input (source) channel
13989 *
13990 *
13991 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13992 *==========================================================================*/
13993QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13994 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13995{
13996 int32_t rc = NO_ERROR;
13997 QCamera3ReprocessChannel *pChannel = NULL;
13998
13999 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014000 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
14001 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070014002 if (NULL == pChannel) {
14003 LOGE("no mem for reprocess channel");
14004 return NULL;
14005 }
14006
14007 rc = pChannel->initialize(IS_TYPE_NONE);
14008 if (rc != NO_ERROR) {
14009 LOGE("init reprocess channel failed, ret = %d", rc);
14010 delete pChannel;
14011 return NULL;
14012 }
14013
14014 // pp feature config
14015 cam_pp_feature_config_t pp_config;
14016 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
14017
14018 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
14019 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
14020 & CAM_QCOM_FEATURE_DSDN) {
14021 // Use CPP CDS in case h/w supports it.
14022 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
14023 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
14024 }
14025 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14026 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
14027 }
14028
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014029 if (config.hdr_param.hdr_enable) {
14030 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14031 pp_config.hdr_param = config.hdr_param;
14032 }
14033
14034 if (mForceHdrSnapshot) {
14035 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14036 pp_config.hdr_param.hdr_enable = 1;
14037 pp_config.hdr_param.hdr_need_1x = 0;
14038 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14039 }
14040
Thierry Strudel3d639192016-09-09 11:52:26 -070014041 rc = pChannel->addReprocStreamsFromSource(pp_config,
14042 config,
14043 IS_TYPE_NONE,
14044 mMetadataChannel);
14045
14046 if (rc != NO_ERROR) {
14047 delete pChannel;
14048 return NULL;
14049 }
14050 return pChannel;
14051}
14052
14053/*===========================================================================
14054 * FUNCTION : getMobicatMask
14055 *
14056 * DESCRIPTION: returns mobicat mask
14057 *
14058 * PARAMETERS : none
14059 *
14060 * RETURN : mobicat mask
14061 *
14062 *==========================================================================*/
14063uint8_t QCamera3HardwareInterface::getMobicatMask()
14064{
14065 return m_MobicatMask;
14066}
14067
14068/*===========================================================================
14069 * FUNCTION : setMobicat
14070 *
14071 * DESCRIPTION: set Mobicat on/off.
14072 *
14073 * PARAMETERS :
14074 * @params : none
14075 *
14076 * RETURN : int32_t type of status
14077 * NO_ERROR -- success
14078 * non-zero failure code
14079 *==========================================================================*/
14080int32_t QCamera3HardwareInterface::setMobicat()
14081{
Thierry Strudel3d639192016-09-09 11:52:26 -070014082 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014083
Shuzhen Wangb57ec912017-07-31 13:24:27 -070014084 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070014085 tune_cmd_t tune_cmd;
14086 tune_cmd.type = SET_RELOAD_CHROMATIX;
14087 tune_cmd.module = MODULE_ALL;
14088 tune_cmd.value = TRUE;
14089 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14090 CAM_INTF_PARM_SET_VFE_COMMAND,
14091 tune_cmd);
14092
14093 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14094 CAM_INTF_PARM_SET_PP_COMMAND,
14095 tune_cmd);
14096 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014097
14098 return ret;
14099}
14100
14101/*===========================================================================
14102* FUNCTION : getLogLevel
14103*
14104* DESCRIPTION: Reads the log level property into a variable
14105*
14106* PARAMETERS :
14107* None
14108*
14109* RETURN :
14110* None
14111*==========================================================================*/
14112void QCamera3HardwareInterface::getLogLevel()
14113{
14114 char prop[PROPERTY_VALUE_MAX];
14115 uint32_t globalLogLevel = 0;
14116
14117 property_get("persist.camera.hal.debug", prop, "0");
14118 int val = atoi(prop);
14119 if (0 <= val) {
14120 gCamHal3LogLevel = (uint32_t)val;
14121 }
14122
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014123 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014124 gKpiDebugLevel = atoi(prop);
14125
14126 property_get("persist.camera.global.debug", prop, "0");
14127 val = atoi(prop);
14128 if (0 <= val) {
14129 globalLogLevel = (uint32_t)val;
14130 }
14131
14132    /* The higher of persist.camera.hal.debug and persist.camera.global.debug is selected */
14133 if (gCamHal3LogLevel < globalLogLevel)
14134 gCamHal3LogLevel = globalLogLevel;
14135
14136 return;
14137}
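/*
 * Illustrative note (editor sketch, not part of the original HAL): the two
 * properties above combine so that gCamHal3LogLevel ends up as
 * max(persist.camera.hal.debug, persist.camera.global.debug). With the
 * hypothetical values below, getLogLevel() leaves gCamHal3LogLevel at 4:
 *
 *   adb shell setprop persist.camera.hal.debug 2
 *   adb shell setprop persist.camera.global.debug 4
 */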
14138
14139/*===========================================================================
14140 * FUNCTION : validateStreamRotations
14141 *
14142 * DESCRIPTION: Check if the rotations requested are supported
14143 *
14144 * PARAMETERS :
14145 * @stream_list : streams to be configured
14146 *
14147 * RETURN : NO_ERROR on success
14148 * -EINVAL on failure
14149 *
14150 *==========================================================================*/
14151int QCamera3HardwareInterface::validateStreamRotations(
14152 camera3_stream_configuration_t *streamList)
14153{
14154 int rc = NO_ERROR;
14155
14156 /*
14157 * Loop through all streams requested in configuration
14158 * Check if unsupported rotations have been requested on any of them
14159 */
14160 for (size_t j = 0; j < streamList->num_streams; j++){
14161 camera3_stream_t *newStream = streamList->streams[j];
14162
Emilian Peev35ceeed2017-06-29 11:58:56 -070014163 switch(newStream->rotation) {
14164 case CAMERA3_STREAM_ROTATION_0:
14165 case CAMERA3_STREAM_ROTATION_90:
14166 case CAMERA3_STREAM_ROTATION_180:
14167 case CAMERA3_STREAM_ROTATION_270:
14168 //Expected values
14169 break;
14170 default:
14171            ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
14172 "type:%d and stream format:%d", __func__,
14173 newStream->rotation, newStream->stream_type,
14174 newStream->format);
14175 return -EINVAL;
14176 }
14177
Thierry Strudel3d639192016-09-09 11:52:26 -070014178 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14179 bool isImplDef = (newStream->format ==
14180 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14181 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14182 isImplDef);
14183
14184 if (isRotated && (!isImplDef || isZsl)) {
14185            LOGE("Error: Unsupported rotation of %d requested for stream "
14186 "type:%d and stream format:%d",
14187 newStream->rotation, newStream->stream_type,
14188 newStream->format);
14189 rc = -EINVAL;
14190 break;
14191 }
14192 }
14193
14194 return rc;
14195}
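/*
 * Illustrative sketch (editor addition, values hypothetical): rotation is only
 * accepted on IMPLEMENTATION_DEFINED output streams that are not bidirectional
 * ZSL streams. For example:
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_OUTPUT;
 *   s.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   s.rotation = CAMERA3_STREAM_ROTATION_90;   // accepted
 *
 *   s.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
 *   s.rotation = CAMERA3_STREAM_ROTATION_90;   // rejected with -EINVAL
 */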
14196
14197/*===========================================================================
14198* FUNCTION : getFlashInfo
14199*
14200* DESCRIPTION: Retrieve information about whether the device has a flash.
14201*
14202* PARAMETERS :
14203* @cameraId : Camera id to query
14204* @hasFlash : Boolean indicating whether there is a flash device
14205* associated with given camera
14206* @flashNode : If a flash device exists, this will be its device node.
14207*
14208* RETURN :
14209* None
14210*==========================================================================*/
14211void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14212 bool& hasFlash,
14213 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14214{
14215 cam_capability_t* camCapability = gCamCapability[cameraId];
14216 if (NULL == camCapability) {
14217 hasFlash = false;
14218 flashNode[0] = '\0';
14219 } else {
14220 hasFlash = camCapability->flash_available;
14221 strlcpy(flashNode,
14222 (char*)camCapability->flash_dev_name,
14223 QCAMERA_MAX_FILEPATH_LENGTH);
14224 }
14225}
14226
14227/*===========================================================================
14228* FUNCTION : getEepromVersionInfo
14229*
14230* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14231*
14232* PARAMETERS : None
14233*
14234* RETURN : string describing EEPROM version
14235* "\0" if no such info available
14236*==========================================================================*/
14237const char *QCamera3HardwareInterface::getEepromVersionInfo()
14238{
14239 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14240}
14241
14242/*===========================================================================
14243* FUNCTION : getLdafCalib
14244*
14245* DESCRIPTION: Retrieve Laser AF calibration data
14246*
14247* PARAMETERS : None
14248*
14249* RETURN : Two uint32_t describing laser AF calibration data
14250* NULL if none is available.
14251*==========================================================================*/
14252const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14253{
14254 if (mLdafCalibExist) {
14255 return &mLdafCalib[0];
14256 } else {
14257 return NULL;
14258 }
14259}
14260
14261/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014262* FUNCTION : getEaselFwVersion
14263*
14264* DESCRIPTION: Retrieve Easel firmware version
14265*
14266* PARAMETERS : None
14267*
14268* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014269*              NULL if the firmware version was not updated
Arnd Geis082a4d72017-08-24 10:33:07 -070014270*==========================================================================*/
14271const char *QCamera3HardwareInterface::getEaselFwVersion()
14272{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014273 if (mEaselFwUpdated) {
14274 return (const char *)&mEaselFwVersion[0];
14275 } else {
14276 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014277 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014278}
14279
14280/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014281 * FUNCTION : dynamicUpdateMetaStreamInfo
14282 *
14283 * DESCRIPTION: This function:
14284 * (1) stops all the channels
14285 * (2) returns error on pending requests and buffers
14286 * (3) sends metastream_info in setparams
14287 * (4) starts all channels
14288 * This is useful when sensor has to be restarted to apply any
14289 * settings such as frame rate from a different sensor mode
14290 *
14291 * PARAMETERS : None
14292 *
14293 * RETURN : NO_ERROR on success
14294 * Error codes on failure
14295 *
14296 *==========================================================================*/
14297int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14298{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014299 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014300 int rc = NO_ERROR;
14301
14302 LOGD("E");
14303
14304 rc = stopAllChannels();
14305 if (rc < 0) {
14306 LOGE("stopAllChannels failed");
14307 return rc;
14308 }
14309
14310 rc = notifyErrorForPendingRequests();
14311 if (rc < 0) {
14312 LOGE("notifyErrorForPendingRequests failed");
14313 return rc;
14314 }
14315
14316 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14317        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x, "
14318 "Format:%d",
14319 mStreamConfigInfo.type[i],
14320 mStreamConfigInfo.stream_sizes[i].width,
14321 mStreamConfigInfo.stream_sizes[i].height,
14322 mStreamConfigInfo.postprocess_mask[i],
14323 mStreamConfigInfo.format[i]);
14324 }
14325
14326 /* Send meta stream info once again so that ISP can start */
14327 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14328 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14329 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14330 mParameters);
14331 if (rc < 0) {
14332        LOGE("set Metastreaminfo failed. Sensor mode will not change");
14333 }
14334
14335 rc = startAllChannels();
14336 if (rc < 0) {
14337 LOGE("startAllChannels failed");
14338 return rc;
14339 }
14340
14341 LOGD("X");
14342 return rc;
14343}
14344
14345/*===========================================================================
14346 * FUNCTION : stopAllChannels
14347 *
14348 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14349 *
14350 * PARAMETERS : None
14351 *
14352 * RETURN : NO_ERROR on success
14353 * Error codes on failure
14354 *
14355 *==========================================================================*/
14356int32_t QCamera3HardwareInterface::stopAllChannels()
14357{
14358 int32_t rc = NO_ERROR;
14359
14360 LOGD("Stopping all channels");
14361 // Stop the Streams/Channels
14362 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14363 it != mStreamInfo.end(); it++) {
14364 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14365 if (channel) {
14366 channel->stop();
14367 }
14368 (*it)->status = INVALID;
14369 }
14370
14371 if (mSupportChannel) {
14372 mSupportChannel->stop();
14373 }
14374 if (mAnalysisChannel) {
14375 mAnalysisChannel->stop();
14376 }
14377 if (mRawDumpChannel) {
14378 mRawDumpChannel->stop();
14379 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014380 if (mHdrPlusRawSrcChannel) {
14381 mHdrPlusRawSrcChannel->stop();
14382 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014383 if (mMetadataChannel) {
14384        /* If mStreamInfo is not empty, there is a metadata stream */
14385 mMetadataChannel->stop();
14386 }
14387
14388 LOGD("All channels stopped");
14389 return rc;
14390}
14391
14392/*===========================================================================
14393 * FUNCTION : startAllChannels
14394 *
14395 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14396 *
14397 * PARAMETERS : None
14398 *
14399 * RETURN : NO_ERROR on success
14400 * Error codes on failure
14401 *
14402 *==========================================================================*/
14403int32_t QCamera3HardwareInterface::startAllChannels()
14404{
14405 int32_t rc = NO_ERROR;
14406
14407 LOGD("Start all channels ");
14408 // Start the Streams/Channels
14409 if (mMetadataChannel) {
14410        /* If mStreamInfo is not empty, there is a metadata stream */
14411 rc = mMetadataChannel->start();
14412 if (rc < 0) {
14413 LOGE("META channel start failed");
14414 return rc;
14415 }
14416 }
14417 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14418 it != mStreamInfo.end(); it++) {
14419 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14420 if (channel) {
14421 rc = channel->start();
14422 if (rc < 0) {
14423 LOGE("channel start failed");
14424 return rc;
14425 }
14426 }
14427 }
14428 if (mAnalysisChannel) {
14429 mAnalysisChannel->start();
14430 }
14431 if (mSupportChannel) {
14432 rc = mSupportChannel->start();
14433 if (rc < 0) {
14434 LOGE("Support channel start failed");
14435 return rc;
14436 }
14437 }
14438 if (mRawDumpChannel) {
14439 rc = mRawDumpChannel->start();
14440 if (rc < 0) {
14441 LOGE("RAW dump channel start failed");
14442 return rc;
14443 }
14444 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014445 if (mHdrPlusRawSrcChannel) {
14446 rc = mHdrPlusRawSrcChannel->start();
14447 if (rc < 0) {
14448 LOGE("HDR+ RAW channel start failed");
14449 return rc;
14450 }
14451 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014452
14453 LOGD("All channels started");
14454 return rc;
14455}
14456
14457/*===========================================================================
14458 * FUNCTION : notifyErrorForPendingRequests
14459 *
14460 * DESCRIPTION: This function sends error for all the pending requests/buffers
14461 *
14462 * PARAMETERS : None
14463 *
14464 * RETURN : Error codes
14465 * NO_ERROR on success
14466 *
14467 *==========================================================================*/
14468int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14469{
Emilian Peev7650c122017-01-19 08:24:33 -080014470 notifyErrorFoPendingDepthData(mDepthChannel);
14471
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014472 auto pendingRequest = mPendingRequestsList.begin();
14473 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014474
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014475 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14476 // buffers (for which buffers aren't sent yet).
14477 while (pendingRequest != mPendingRequestsList.end() ||
14478 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14479 if (pendingRequest == mPendingRequestsList.end() ||
14480 pendingBuffer->frame_number < pendingRequest->frame_number) {
14481            // If metadata for this frame was sent, notify about a buffer error and return buffers
14482 // with error.
14483 for (auto &info : pendingBuffer->mPendingBufferList) {
14484 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014485 camera3_notify_msg_t notify_msg;
14486 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14487 notify_msg.type = CAMERA3_MSG_ERROR;
14488 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014489 notify_msg.message.error.error_stream = info.stream;
14490 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014491 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014492
14493 camera3_stream_buffer_t buffer = {};
14494 buffer.acquire_fence = -1;
14495 buffer.release_fence = -1;
14496 buffer.buffer = info.buffer;
14497 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14498 buffer.stream = info.stream;
14499 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014500 }
14501
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014502 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14503 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14504 pendingBuffer->frame_number > pendingRequest->frame_number) {
14505 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014506 camera3_notify_msg_t notify_msg;
14507 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14508 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014509 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14510 notify_msg.message.error.error_stream = nullptr;
14511 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014512 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014513
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014514 if (pendingRequest->input_buffer != nullptr) {
14515 camera3_capture_result result = {};
14516 result.frame_number = pendingRequest->frame_number;
14517 result.result = nullptr;
14518 result.input_buffer = pendingRequest->input_buffer;
14519 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014520 }
14521
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014522 mShutterDispatcher.clear(pendingRequest->frame_number);
14523 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14524 } else {
14525 // If both buffers and result metadata weren't sent yet, notify about a request error
14526 // and return buffers with error.
14527 for (auto &info : pendingBuffer->mPendingBufferList) {
14528 camera3_notify_msg_t notify_msg;
14529 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14530 notify_msg.type = CAMERA3_MSG_ERROR;
14531 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14532 notify_msg.message.error.error_stream = info.stream;
14533 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14534 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014535
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014536 camera3_stream_buffer_t buffer = {};
14537 buffer.acquire_fence = -1;
14538 buffer.release_fence = -1;
14539 buffer.buffer = info.buffer;
14540 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14541 buffer.stream = info.stream;
14542 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14543 }
14544
14545 if (pendingRequest->input_buffer != nullptr) {
14546 camera3_capture_result result = {};
14547 result.frame_number = pendingRequest->frame_number;
14548 result.result = nullptr;
14549 result.input_buffer = pendingRequest->input_buffer;
14550 orchestrateResult(&result);
14551 }
14552
14553 mShutterDispatcher.clear(pendingRequest->frame_number);
14554 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14555 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014556 }
14557 }
14558
14559 /* Reset pending frame Drop list and requests list */
14560 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014561 mShutterDispatcher.clear();
14562 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014563 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014564 mExpectedFrameDuration = 0;
14565 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014566 LOGH("Cleared all the pending buffers ");
14567
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014568 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014569}
14570
14571bool QCamera3HardwareInterface::isOnEncoder(
14572 const cam_dimension_t max_viewfinder_size,
14573 uint32_t width, uint32_t height)
14574{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014575 return ((width > (uint32_t)max_viewfinder_size.width) ||
14576 (height > (uint32_t)max_viewfinder_size.height) ||
14577 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14578 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014579}
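/*
 * Illustrative example (editor sketch, numbers hypothetical): with a maximum
 * viewfinder size of 1920x1080, a 1280x720 stream stays off the encoder path,
 * while a 4032x3024 snapshot stream is reported as on-encoder because both
 * dimensions exceed the viewfinder limit:
 *
 *   cam_dimension_t maxVf = {1920, 1080};
 *   isOnEncoder(maxVf, 1280, 720);    // false
 *   isOnEncoder(maxVf, 4032, 3024);   // true
 */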
14580
14581/*===========================================================================
14582 * FUNCTION : setBundleInfo
14583 *
14584 * DESCRIPTION: Set bundle info for all streams that are bundled.
14585 *
14586 * PARAMETERS : None
14587 *
14588 * RETURN : NO_ERROR on success
14589 * Error codes on failure
14590 *==========================================================================*/
14591int32_t QCamera3HardwareInterface::setBundleInfo()
14592{
14593 int32_t rc = NO_ERROR;
14594
14595 if (mChannelHandle) {
14596 cam_bundle_config_t bundleInfo;
14597 memset(&bundleInfo, 0, sizeof(bundleInfo));
14598 rc = mCameraHandle->ops->get_bundle_info(
14599 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14600 if (rc != NO_ERROR) {
14601 LOGE("get_bundle_info failed");
14602 return rc;
14603 }
14604 if (mAnalysisChannel) {
14605 mAnalysisChannel->setBundleInfo(bundleInfo);
14606 }
14607 if (mSupportChannel) {
14608 mSupportChannel->setBundleInfo(bundleInfo);
14609 }
14610 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14611 it != mStreamInfo.end(); it++) {
14612 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14613 channel->setBundleInfo(bundleInfo);
14614 }
14615 if (mRawDumpChannel) {
14616 mRawDumpChannel->setBundleInfo(bundleInfo);
14617 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014618 if (mHdrPlusRawSrcChannel) {
14619 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14620 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014621 }
14622
14623 return rc;
14624}
14625
14626/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014627 * FUNCTION : setInstantAEC
14628 *
14629 * DESCRIPTION: Set Instant AEC related params.
14630 *
14631 * PARAMETERS :
14632 * @meta: CameraMetadata reference
14633 *
14634 * RETURN : NO_ERROR on success
14635 * Error codes on failure
14636 *==========================================================================*/
14637int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14638{
14639 int32_t rc = NO_ERROR;
14640 uint8_t val = 0;
14641 char prop[PROPERTY_VALUE_MAX];
14642
14643 // First try to configure instant AEC from framework metadata
14644 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14645 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14646 }
14647
14648 // If framework did not set this value, try to read from set prop.
14649 if (val == 0) {
14650 memset(prop, 0, sizeof(prop));
14651 property_get("persist.camera.instant.aec", prop, "0");
14652 val = (uint8_t)atoi(prop);
14653 }
14654
14655 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14656 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14657 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14658 mInstantAEC = val;
14659 mInstantAECSettledFrameNumber = 0;
14660 mInstantAecFrameIdxCount = 0;
14661 LOGH("instantAEC value set %d",val);
14662 if (mInstantAEC) {
14663 memset(prop, 0, sizeof(prop));
14664 property_get("persist.camera.ae.instant.bound", prop, "10");
14665 int32_t aec_frame_skip_cnt = atoi(prop);
14666 if (aec_frame_skip_cnt >= 0) {
14667 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14668 } else {
14669 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14670 rc = BAD_VALUE;
14671 }
14672 }
14673 } else {
14674 LOGE("Bad instant aec value set %d", val);
14675 rc = BAD_VALUE;
14676 }
14677 return rc;
14678}
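/*
 * Illustrative note (editor sketch): instant AEC can be requested either via
 * the QCAMERA3_INSTANT_AEC_MODE vendor tag in the request metadata or, when
 * the framework leaves it unset, via the fallback property read above:
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *
 * The value 1 is only a hypothetical example; any value in
 * [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) is accepted.
 */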
14679
14680/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014681 * FUNCTION : get_num_overall_buffers
14682 *
14683 * DESCRIPTION: Estimate number of pending buffers across all requests.
14684 *
14685 * PARAMETERS : None
14686 *
14687 * RETURN : Number of overall pending buffers
14688 *
14689 *==========================================================================*/
14690uint32_t PendingBuffersMap::get_num_overall_buffers()
14691{
14692 uint32_t sum_buffers = 0;
14693 for (auto &req : mPendingBuffersInRequest) {
14694 sum_buffers += req.mPendingBufferList.size();
14695 }
14696 return sum_buffers;
14697}
14698
14699/*===========================================================================
14700 * FUNCTION : removeBuf
14701 *
14702 * DESCRIPTION: Remove a matching buffer from tracker.
14703 *
14704 * PARAMETERS : @buffer: image buffer for the callback
14705 *
14706 * RETURN : None
14707 *
14708 *==========================================================================*/
14709void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14710{
14711 bool buffer_found = false;
14712 for (auto req = mPendingBuffersInRequest.begin();
14713 req != mPendingBuffersInRequest.end(); req++) {
14714 for (auto k = req->mPendingBufferList.begin();
14715 k != req->mPendingBufferList.end(); k++ ) {
14716 if (k->buffer == buffer) {
14717 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14718 req->frame_number, buffer);
14719 k = req->mPendingBufferList.erase(k);
14720 if (req->mPendingBufferList.empty()) {
14721 // Remove this request from Map
14722 req = mPendingBuffersInRequest.erase(req);
14723 }
14724 buffer_found = true;
14725 break;
14726 }
14727 }
14728 if (buffer_found) {
14729 break;
14730 }
14731 }
14732 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14733 get_num_overall_buffers());
14734}
14735
14736/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014737 * FUNCTION : getBufErrStatus
14738 *
14739 * DESCRIPTION: get buffer error status
14740 *
14741 * PARAMETERS : @buffer: buffer handle
14742 *
14743 * RETURN : Error status
14744 *
14745 *==========================================================================*/
14746int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14747{
14748 for (auto& req : mPendingBuffersInRequest) {
14749 for (auto& k : req.mPendingBufferList) {
14750 if (k.buffer == buffer)
14751 return k.bufStatus;
14752 }
14753 }
14754 return CAMERA3_BUFFER_STATUS_OK;
14755}
14756
14757/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014758 * FUNCTION : setPAAFSupport
14759 *
14760 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14761 * feature mask according to stream type and filter
14762 * arrangement
14763 *
14764 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14765 * @stream_type: stream type
14766 * @filter_arrangement: filter arrangement
14767 *
14768 * RETURN : None
14769 *==========================================================================*/
14770void QCamera3HardwareInterface::setPAAFSupport(
14771 cam_feature_mask_t& feature_mask,
14772 cam_stream_type_t stream_type,
14773 cam_color_filter_arrangement_t filter_arrangement)
14774{
Thierry Strudel3d639192016-09-09 11:52:26 -070014775 switch (filter_arrangement) {
14776 case CAM_FILTER_ARRANGEMENT_RGGB:
14777 case CAM_FILTER_ARRANGEMENT_GRBG:
14778 case CAM_FILTER_ARRANGEMENT_GBRG:
14779 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014780 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14781 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014782 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014783 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14784 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014785 }
14786 break;
14787 case CAM_FILTER_ARRANGEMENT_Y:
14788 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14789 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14790 }
14791 break;
14792 default:
14793 break;
14794 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014795 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14796 feature_mask, stream_type, filter_arrangement);
14797
14798
Thierry Strudel3d639192016-09-09 11:52:26 -070014799}
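/*
 * Illustrative usage (editor sketch): for a Bayer sensor, PAAF is added to
 * preview, analysis and video streams (unless CAM_QTI_FEATURE_PPEISCORE is
 * already set), while for a mono (Y-only) sensor only the analysis stream
 * gets it:
 *
 *   cam_feature_mask_t mask = CAM_QCOM_FEATURE_NONE;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *           CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now contains CAM_QCOM_FEATURE_PAAF
 */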
14800
14801/*===========================================================================
14802* FUNCTION : getSensorMountAngle
14803*
14804* DESCRIPTION: Retrieve sensor mount angle
14805*
14806* PARAMETERS : None
14807*
14808* RETURN : sensor mount angle in uint32_t
14809*==========================================================================*/
14810uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14811{
14812 return gCamCapability[mCameraId]->sensor_mount_angle;
14813}
14814
14815/*===========================================================================
14816* FUNCTION : getRelatedCalibrationData
14817*
14818* DESCRIPTION: Retrieve related system calibration data
14819*
14820* PARAMETERS : None
14821*
14822* RETURN : Pointer of related system calibration data
14823*==========================================================================*/
14824const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14825{
14826 return (const cam_related_system_calibration_data_t *)
14827 &(gCamCapability[mCameraId]->related_cam_calibration);
14828}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014829
14830/*===========================================================================
14831 * FUNCTION : is60HzZone
14832 *
14833 * DESCRIPTION: Whether the phone is in a zone with 60 Hz mains electricity frequency
14834 *
14835 * PARAMETERS : None
14836 *
14837 * RETURN : True if in 60Hz zone, False otherwise
14838 *==========================================================================*/
14839bool QCamera3HardwareInterface::is60HzZone()
14840{
14841 time_t t = time(NULL);
14842 struct tm lt;
14843
14844 struct tm* r = localtime_r(&t, &lt);
14845
14846 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14847 return true;
14848 else
14849 return false;
14850}
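/*
 * Illustrative note (editor addition): the UTC offset window (-2h, +8h),
 * exclusive, is treated as the 50 Hz region; any offset outside it, or a
 * localtime_r() failure, is treated as 60 Hz. For example, a device at UTC-8
 * (tm_gmtoff == -8*60*60) returns true, while one at UTC+1 returns false.
 */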
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014851
14852/*===========================================================================
14853 * FUNCTION : adjustBlackLevelForCFA
14854 *
14855 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14856 * of bayer CFA (Color Filter Array).
14857 *
14858 * PARAMETERS : @input: black level pattern in the order of RGGB
14859 * @output: black level pattern in the order of CFA
14860 * @color_arrangement: CFA color arrangement
14861 *
14862 * RETURN : None
14863 *==========================================================================*/
14864template<typename T>
14865void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14866 T input[BLACK_LEVEL_PATTERN_CNT],
14867 T output[BLACK_LEVEL_PATTERN_CNT],
14868 cam_color_filter_arrangement_t color_arrangement)
14869{
14870 switch (color_arrangement) {
14871 case CAM_FILTER_ARRANGEMENT_GRBG:
14872 output[0] = input[1];
14873 output[1] = input[0];
14874 output[2] = input[3];
14875 output[3] = input[2];
14876 break;
14877 case CAM_FILTER_ARRANGEMENT_GBRG:
14878 output[0] = input[2];
14879 output[1] = input[3];
14880 output[2] = input[0];
14881 output[3] = input[1];
14882 break;
14883 case CAM_FILTER_ARRANGEMENT_BGGR:
14884 output[0] = input[3];
14885 output[1] = input[2];
14886 output[2] = input[1];
14887 output[3] = input[0];
14888 break;
14889 case CAM_FILTER_ARRANGEMENT_RGGB:
14890 output[0] = input[0];
14891 output[1] = input[1];
14892 output[2] = input[2];
14893 output[3] = input[3];
14894 break;
14895 default:
14896 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14897 break;
14898 }
14899}
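/*
 * Illustrative usage (editor sketch, values hypothetical): reordering an RGGB
 * black level pattern for a GRBG sensor swaps each pair:
 *
 *   float inRggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 65.0f, 66.0f, 67.0f};
 *   float outCfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(inRggb, outCfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // outCfa is now {65.0f, 64.0f, 67.0f, 66.0f}
 */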
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014900
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014901void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14902 CameraMetadata &resultMetadata,
14903 std::shared_ptr<metadata_buffer_t> settings)
14904{
14905 if (settings == nullptr) {
14906 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14907 return;
14908 }
14909
14910 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14911 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014912 } else {
14913 resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014914 }
14915
14916 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14917 String8 str((const char *)gps_methods);
14918 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014919 } else {
14920 resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014921 }
14922
14923 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14924 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014925 } else {
14926 resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014927 }
14928
14929 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14930 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014931 } else {
14932 resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014933 }
14934
14935 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14936 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14937 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014938 } else {
14939 resultMetadata.erase(ANDROID_JPEG_QUALITY);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014940 }
14941
14942 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14943 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14944 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014945 } else {
14946 resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014947 }
14948
14949 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14950 int32_t fwk_thumb_size[2];
14951 fwk_thumb_size[0] = thumb_size->width;
14952 fwk_thumb_size[1] = thumb_size->height;
14953 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014954 } else {
14955 resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014956 }
14957
14958 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14959 uint8_t fwk_intent = intent[0];
14960 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014961 } else {
14962 resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014963 }
14964}
14965
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014966bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14967 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014968 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14969 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14970 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14971 return false;
14972 }
14973
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014974 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14975 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14976 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014977 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014978 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014979 return false;
14980 }
14981
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014982 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014983 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14984 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014985 return false;
14986 }
14987
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014988 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14989 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14990 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14991 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14992 return false;
14993 }
14994
14995 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14996 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14997 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14998 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14999 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
15000 return false;
15001 }
15002
15003 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
15004 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
15005 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
15006 return false;
15007 }
15008
15009 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
15010 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
15011 ANDROID_CONTROL_EFFECT_MODE_OFF) {
15012        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
15013 return false;
15014 }
15015
15016 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
15017 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
15018 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
15019 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
15020 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
15021 return false;
15022 }
15023
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070015024 // TODO (b/66500626): support AE compensation.
15025 if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
15026 metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
15027 ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
15028 return false;
15029 }
15030
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015031 // TODO (b/32585046): support non-ZSL.
15032 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
15033 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
15034 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
15035 return false;
15036 }
15037
15038 // TODO (b/32586081): support flash.
15039 if (!metadata.exists(ANDROID_FLASH_MODE) ||
15040 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
15041 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
15042 return false;
15043 }
15044
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015045 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
15046 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
15047 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
15048 return false;
15049 }
15050
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015051 switch (request.output_buffers[0].stream->format) {
15052 case HAL_PIXEL_FORMAT_BLOB:
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015053 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15054 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015055 break;
15056 default:
15057            ALOGV("%s: Not an HDR+ request: Only JPEG and YUV outputs are supported.", __FUNCTION__);
15058 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15059 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15060                    request.output_buffers[i].stream->width,
15061                    request.output_buffers[i].stream->height,
15062                    request.output_buffers[i].stream->format);
15063 }
15064 return false;
15065 }
15066
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015067 return true;
15068}
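/*
 * Illustrative summary (editor addition): per the checks above, a request
 * qualifies for HDR+ only when it looks like a plain still capture, i.e.
 * NR/EDGE/ABERRATION/TONEMAP set to HIGH_QUALITY, AE ON (or ON_AUTO_FLASH),
 * AWB AUTO, EFFECT OFF, CONTROL_MODE AUTO or USE_SCENE_MODE, AE compensation
 * 0, ZSL enabled, FLASH OFF, and the first output buffer is a JPEG, YUV, or
 * implementation-defined stream.
 */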
15069
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015070void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15071 if (hdrPlusRequest == nullptr) return;
15072
15073 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15074 // Find the stream for this buffer.
15075 for (auto streamInfo : mStreamInfo) {
15076 if (streamInfo->id == outputBufferIter.first) {
15077 if (streamInfo->channel == mPictureChannel) {
15078 // For picture channel, this buffer is internally allocated so return this
15079 // buffer to picture channel.
15080 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15081 } else {
15082 // Unregister this buffer for other channels.
15083 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15084 }
15085 break;
15086 }
15087 }
15088 }
15089
15090 hdrPlusRequest->outputBuffers.clear();
15091 hdrPlusRequest->frameworkOutputBuffers.clear();
15092}
15093
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015094bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
15095 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
15096 const CameraMetadata &metadata)
15097{
15098 if (hdrPlusRequest == nullptr) return false;
15099 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
15100
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015101 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015102 pbcamera::CaptureRequest pbRequest;
15103 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015104 // Iterate through all requested output buffers and add them to an HDR+ request.
15105 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15106 // Find the index of the stream in mStreamInfo.
15107 uint32_t pbStreamId = 0;
15108 bool found = false;
15109 for (auto streamInfo : mStreamInfo) {
15110 if (streamInfo->stream == request.output_buffers[i].stream) {
15111 pbStreamId = streamInfo->id;
15112 found = true;
15113 break;
15114 }
15115 }
15116
15117 if (!found) {
15118 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15119 abortPendingHdrplusRequest(hdrPlusRequest);
15120 return false;
15121 }
15122 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15123 switch (request.output_buffers[i].stream->format) {
15124 case HAL_PIXEL_FORMAT_BLOB:
15125 {
15126 // For jpeg output, get a YUV buffer from pic channel.
15127 QCamera3PicChannel *picChannel =
15128 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15129 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15130 if (res != OK) {
15131 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15132 __FUNCTION__, strerror(-res), res);
15133 abortPendingHdrplusRequest(hdrPlusRequest);
15134 return false;
15135 }
15136 break;
15137 }
15138 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15139 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15140 {
15141 // For YUV output, register the buffer and get the buffer def from the channel.
15142 QCamera3ProcessingChannel *channel =
15143 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15144 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15145 outBuffer.get());
15146 if (res != OK) {
15147 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15148 strerror(-res), res);
15149 abortPendingHdrplusRequest(hdrPlusRequest);
15150 return false;
15151 }
15152 break;
15153 }
15154 default:
15155 abortPendingHdrplusRequest(hdrPlusRequest);
15156 return false;
15157 }
15158
15159 pbcamera::StreamBuffer buffer;
15160 buffer.streamId = pbStreamId;
15161 buffer.dmaBufFd = outBuffer->fd;
15162 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15163 buffer.dataSize = outBuffer->frame_len;
15164
15165 pbRequest.outputBuffers.push_back(buffer);
15166
15167 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15168 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15169 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015170
15171 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015172 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015173 if (res != OK) {
15174 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15175 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015176 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015177 return false;
15178 }
15179
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015180 return true;
15181}
15182
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015183status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15184{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015185 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15186 return OK;
15187 }
15188
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015189 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015190 if (res != OK) {
15191 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15192 strerror(-res), res);
15193 return res;
15194 }
15195 gHdrPlusClientOpening = true;
15196
15197 return OK;
15198}
15199
Chien-Yu Chenee335912017-02-09 17:53:20 -080015200status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15201{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015202 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015203
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015204 if (mHdrPlusModeEnabled) {
15205 return OK;
15206 }
15207
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015208 // Check if gHdrPlusClient is opened or being opened.
15209 if (gHdrPlusClient == nullptr) {
15210 if (gHdrPlusClientOpening) {
15211 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15212 return OK;
15213 }
15214
15215 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015216 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015217 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15218 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015219 return res;
15220 }
15221
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015222 // When opening HDR+ client completes, HDR+ mode will be enabled.
15223 return OK;
15224
Chien-Yu Chenee335912017-02-09 17:53:20 -080015225 }
15226
15227 // Configure stream for HDR+.
15228 res = configureHdrPlusStreamsLocked();
15229 if (res != OK) {
15230 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015231 return res;
15232 }
15233
15234 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15235 res = gHdrPlusClient->setZslHdrPlusMode(true);
15236 if (res != OK) {
15237 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015238 return res;
15239 }
15240
15241 mHdrPlusModeEnabled = true;
15242 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15243
15244 return OK;
15245}
15246
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015247void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15248{
15249 if (gHdrPlusClientOpening) {
15250 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15251 }
15252}
15253
Chien-Yu Chenee335912017-02-09 17:53:20 -080015254void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15255{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015256 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015257 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015258 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15259 if (res != OK) {
15260 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15261 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015262
15263 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015264 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015265 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015266 }
15267
15268 mHdrPlusModeEnabled = false;
15269 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15270}
15271
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015272bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15273{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015274 // Check that at least one YUV or one JPEG output is configured.
15275 // TODO: Support RAW (b/36690506)
15276 for (auto streamInfo : mStreamInfo) {
15277 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15278 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15279 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15280 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15281 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15282 return true;
15283 }
15284 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015285 }
15286
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015287 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015288}
15289
Chien-Yu Chenee335912017-02-09 17:53:20 -080015290status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015291{
15292 pbcamera::InputConfiguration inputConfig;
15293 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15294 status_t res = OK;
15295
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015296 // Sensor MIPI will send data to Easel.
15297 inputConfig.isSensorInput = true;
15298 inputConfig.sensorMode.cameraId = mCameraId;
15299 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15300 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15301 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15302 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15303 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15304 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenc8b6ad02017-09-15 13:50:26 -070015305 inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
15306
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015307 if (mSensorModeInfo.num_raw_bits != 10) {
15308 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15309 mSensorModeInfo.num_raw_bits);
15310 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015311 }
15312
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015313 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015314
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015315 // Iterate through configured output streams in HAL and configure those streams in HDR+
15316 // service.
15317 for (auto streamInfo : mStreamInfo) {
15318 pbcamera::StreamConfiguration outputConfig;
15319 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15320 switch (streamInfo->stream->format) {
15321 case HAL_PIXEL_FORMAT_BLOB:
15322 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15323 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15324 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15325 streamInfo->channel, /*stream index*/0);
15326 if (res != OK) {
15327 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15328 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015329
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015330 return res;
15331 }
15332
15333 outputStreamConfigs.push_back(outputConfig);
15334 break;
15335 default:
15336 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15337 break;
15338 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015339 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015340 }
15341
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015342 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015343 if (res != OK) {
15344        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15345 strerror(-res), res);
15346 return res;
15347 }
15348
15349 return OK;
15350}
15351
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015352void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015353{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015354 pthread_mutex_lock(&mMutex);
15355 mState = ERROR;
15356 pthread_mutex_unlock(&mMutex);
15357
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015358 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015359}
15360
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015361void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15362{
15363 if (mEaselErrorFuture.valid()) {
15364 // The error future has been invoked.
15365 return;
15366 }
15367
15368 // Launch a future to handle the fatal error.
15369 mEaselErrorFuture = std::async(std::launch::async,
15370 &QCamera3HardwareInterface::handleEaselFatalError, this);
15371}
15372
15373void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15374{
15375 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15376 handleEaselFatalErrorAsync();
15377}
15378
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015379void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15380{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015381 int rc = NO_ERROR;
15382
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015383 if (client == nullptr) {
15384 ALOGE("%s: Opened client is null.", __FUNCTION__);
15385 return;
15386 }
15387
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015388 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015389 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15390
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015391 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015392 if (!gHdrPlusClientOpening) {
15393 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15394 return;
15395 }
15396
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015397 gHdrPlusClient = std::move(client);
15398 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015399 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015400
15401 // Set static metadata.
15402 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15403 if (res != OK) {
15404 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15405 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015406 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015407 gHdrPlusClient = nullptr;
15408 return;
15409 }
15410
15411 // Enable HDR+ mode.
15412 res = enableHdrPlusModeLocked();
15413 if (res != OK) {
15414 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15415 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015416
15417 // Get Easel firmware version
15418 if (EaselManagerClientOpened) {
15419 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15420 if (rc != OK) {
15421 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15422 } else {
15423 mEaselFwUpdated = true;
15424 }
15425 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015426}
15427
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015428void QCamera3HardwareInterface::onOpenFailed(status_t err)
15429{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015430 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015431 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015432 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015433 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015434}
15435
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015436void QCamera3HardwareInterface::onFatalError()
15437{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015438 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15439 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015440}
15441
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015442void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15443{
15444 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15445 __LINE__, requestId, apSensorTimestampNs);
15446
15447 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15448}
15449
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015450void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15451{
15452 pthread_mutex_lock(&mMutex);
15453
15454 // Find the pending request for this result metadata.
15455 auto requestIter = mPendingRequestsList.begin();
15456 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15457 requestIter++;
15458 }
15459
15460 if (requestIter == mPendingRequestsList.end()) {
15461 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15462 pthread_mutex_unlock(&mMutex);
15463 return;
15464 }
15465
15466 requestIter->partial_result_cnt++;
15467
15468 CameraMetadata metadata;
15469 uint8_t ready = true;
15470 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15471
15472 // Send it to framework.
15473 camera3_capture_result_t result = {};
15474
15475 result.result = metadata.getAndLock();
15476 // Populate metadata result
15477 result.frame_number = requestId;
15478 result.num_output_buffers = 0;
15479 result.output_buffers = NULL;
15480 result.partial_result = requestIter->partial_result_cnt;
15481
15482 orchestrateResult(&result);
15483 metadata.unlock(result.result);
15484
15485 pthread_mutex_unlock(&mMutex);
15486}
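
// A sketch of how a consumer of the partial result sent above could detect the
// "next still-intent request ready" signal. checkNextRequestReady() is a hypothetical
// helper; find_camera_metadata_ro_entry() is the standard camera_metadata C accessor.
#if 0
static bool checkNextRequestReady(const camera3_capture_result_t &result)
{
    camera_metadata_ro_entry_t entry = {};
    if (result.result == nullptr ||
            find_camera_metadata_ro_entry(result.result,
                NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &entry) != 0) {
        return false;
    }
    return entry.count > 0 && entry.data.u8[0] != 0;
}
#endif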
15487
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015488void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15489 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15490 uint32_t stride, int32_t format)
15491{
15492 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15493 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15494 __LINE__, width, height, requestId);
15495 char buf[FILENAME_MAX] = {};
15496 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15497 requestId, width, height);
15498
15499 pbcamera::StreamConfiguration config = {};
15500 config.image.width = width;
15501 config.image.height = height;
15502 config.image.format = format;
15503
15504 pbcamera::PlaneConfiguration plane = {};
15505 plane.stride = stride;
15506 plane.scanline = height;
15507
15508 config.image.planes.push_back(plane);
15509
15510 pbcamera::StreamBuffer buffer = {};
15511 buffer.streamId = 0;
15512 buffer.dmaBufFd = -1;
15513 buffer.data = postview->data();
15514 buffer.dataSize = postview->size();
15515
15516 hdrplus_client_utils::writePpm(buf, config, buffer);
15517 }
15518
15519 pthread_mutex_lock(&mMutex);
15520
15521 // Find the pending request for this result metadata.
15522 auto requestIter = mPendingRequestsList.begin();
15523 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15524 requestIter++;
15525 }
15526
15527 if (requestIter == mPendingRequestsList.end()) {
15528 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15529 pthread_mutex_unlock(&mMutex);
15530 return;
15531 }
15532
15533 requestIter->partial_result_cnt++;
15534
15535 CameraMetadata metadata;
15536 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15537 static_cast<int32_t>(stride)};
15538 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15539 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15540
15541 // Send it to framework.
15542 camera3_capture_result_t result = {};
15543
15544 result.result = metadata.getAndLock();
15545 // Populate metadata result
15546 result.frame_number = requestId;
15547 result.num_output_buffers = 0;
15548 result.output_buffers = NULL;
15549 result.partial_result = requestIter->partial_result_cnt;
15550
15551 orchestrateResult(&result);
15552 metadata.unlock(result.result);
15553
15554 pthread_mutex_unlock(&mMutex);
15555}
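
// The two vendor tags written above travel together: POSTVIEW_CONFIG carries
// {width, height, stride} in that order and POSTVIEW_DATA carries the pixels.
// A sketch of reading the config back on the consumer side; parsePostviewConfig()
// is a hypothetical helper.
#if 0
static bool parsePostviewConfig(const camera_metadata_t *meta,
        int32_t *width, int32_t *height, int32_t *stride)
{
    camera_metadata_ro_entry_t entry = {};
    if (find_camera_metadata_ro_entry(meta,
            NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, &entry) != 0 || entry.count < 3) {
        return false;
    }
    *width  = entry.data.i32[0];  // Same order as the update() call above.
    *height = entry.data.i32[1];
    *stride = entry.data.i32[2];
    return true;
}
#endif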
15556
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015557void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015558 const camera_metadata_t &resultMetadata)
15559{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015560 if (result == nullptr) {
15561 ALOGE("%s: result is nullptr.", __FUNCTION__);
15562 return;
15563 }
15564
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015565 // Find the pending HDR+ request.
15566 HdrPlusPendingRequest pendingRequest;
15567 {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, result->requestId);
            return;
        }
        pendingRequest = req->second;
    }
15572
15573 // Update the result metadata with the settings of the HDR+ still capture request because
15574 // the result metadata belongs to a ZSL buffer.
15575 CameraMetadata metadata;
15576 metadata = &resultMetadata;
15577 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15578 camera_metadata_t* updatedResultMetadata = metadata.release();
15579
15580 uint32_t halSnapshotStreamId = 0;
15581 if (mPictureChannel != nullptr) {
15582 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15583 }
15584
15585 auto halMetadata = std::make_shared<metadata_buffer_t>();
15586 clear_metadata_buffer(halMetadata.get());
15587
15588 // Convert updated result metadata to HAL metadata.
15589 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15590 halSnapshotStreamId, /*minFrameDuration*/0);
15591 if (res != 0) {
15592 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15593 }
15594
15595 for (auto &outputBuffer : result->outputBuffers) {
15596 uint32_t streamId = outputBuffer.streamId;
15597
15598 // Find the framework output buffer in the pending request.
15599 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15600 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15601 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15602 streamId);
15603 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015604 }
15605
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015606 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15607
15608 // Find the channel for the output buffer.
15609 QCamera3ProcessingChannel *channel =
15610 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15611
15612 // Find the output buffer def.
15613 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15614 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15615 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15616 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015617 }
15618
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015619 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015620
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015621 // Check whether to dump the buffer.
15622 if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15623 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15624 // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15625 char prop[PROPERTY_VALUE_MAX];
15626 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15627 bool dumpYuvOutput = atoi(prop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015628
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015629 if (dumpYuvOutput) {
15630 // Dump yuv buffer to a ppm file.
15631 pbcamera::StreamConfiguration outputConfig;
15632 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15633 channel, /*stream index*/0);
15634 if (rc == OK) {
15635 char buf[FILENAME_MAX] = {};
15636 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15637 result->requestId, streamId,
15638 outputConfig.image.width, outputConfig.image.height);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015639
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015640 hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15641 } else {
15642 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15643 "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15644 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015645 }
15646 }
15647
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015648 if (channel == mPictureChannel) {
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015649 // Return the buffer to pic channel for encoding.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015650 mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15651 frameworkOutputBuffer->buffer, result->requestId,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015652 halMetadata);
15653 } else {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015654 // Return the buffer to camera framework.
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015655 pthread_mutex_lock(&mMutex);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015656 handleBufferWithLock(frameworkOutputBuffer, result->requestId);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015657 channel->unregisterBuffer(outputBufferDef.get());
Chien-Yu Chen0c8eaaa2017-09-19 14:13:14 -070015658 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015659 }
15660 }
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015661
15662 // Send HDR+ metadata to framework.
15663 {
15664 pthread_mutex_lock(&mMutex);
15665
15666 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15667 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15668 pthread_mutex_unlock(&mMutex);
15669 }
15670
15671 // Remove the HDR+ pending request.
15672 {
15673 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15674 auto req = mHdrPlusPendingRequests.find(result->requestId);
15675 mHdrPlusPendingRequests.erase(req);
15676 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015677}
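
// The buffer routing above relies on per-request bookkeeping keyed by HDR+ stream id.
// A rough sketch of that shape for orientation only; the authoritative definition of
// HdrPlusPendingRequest lives in QCamera3HWI.h and may differ in detail.
#if 0
struct HdrPlusPendingRequestSketch {
    // streamId -> framework buffer that must eventually be returned for this request.
    std::map<uint32_t, camera3_stream_buffer_t> frameworkOutputBuffers;
    // streamId -> locally registered mm_camera buffer that backs the framework buffer.
    std::map<uint32_t, std::shared_ptr<mm_camera_buf_def_t>> outputBuffers;
    // Still-capture settings that get folded back into the ZSL result metadata.
    std::shared_ptr<metadata_buffer_t> settings;
};
#endif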
15678
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015679void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15680{
15681 if (failedResult == nullptr) {
15682 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15683 return;
15684 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015685
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015686 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015687
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015688 // Find the pending HDR+ request.
15689 HdrPlusPendingRequest pendingRequest;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015690 {
15691 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015692 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15693 if (req == mHdrPlusPendingRequests.end()) {
15694 ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
15695 return;
15696 }
15697 pendingRequest = req->second;
15698 }
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015699
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015700 for (auto &outputBuffer : failedResult->outputBuffers) {
15701 uint32_t streamId = outputBuffer.streamId;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015702
        // Find the framework output buffer in the pending request.
15705 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15706 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15707 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15708 streamId);
15709 continue;
15710 }
15711
15712 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15713
15714 // Find the channel for the output buffer.
15715 QCamera3ProcessingChannel *channel =
15716 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15717
15718 // Find the output buffer def.
15719 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15720 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15721 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15722 continue;
15723 }
15724
15725 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15726
15727 if (channel == mPictureChannel) {
15728 // Return the buffer to pic channel.
15729 mPictureChannel->returnYuvBuffer(outputBufferDef.get());
15730 } else {
15731 channel->unregisterBuffer(outputBufferDef.get());
15732 }
15733 }
15734
15735 // Remove the HDR+ pending request.
15736 {
15737 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15738 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15739 mHdrPlusPendingRequests.erase(req);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015740 }
15741
15742 pthread_mutex_lock(&mMutex);
15743
15744 // Find the pending buffers.
15745 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15746 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15747 if (pendingBuffers->frame_number == failedResult->requestId) {
15748 break;
15749 }
15750 pendingBuffers++;
15751 }
15752
15753 // Send out buffer errors for the pending buffers.
15754 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15755 std::vector<camera3_stream_buffer_t> streamBuffers;
15756 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15757 // Prepare a stream buffer.
15758 camera3_stream_buffer_t streamBuffer = {};
15759 streamBuffer.stream = buffer.stream;
15760 streamBuffer.buffer = buffer.buffer;
15761 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15762 streamBuffer.acquire_fence = -1;
15763 streamBuffer.release_fence = -1;
15764
15765 streamBuffers.push_back(streamBuffer);
15766
15767 // Send out error buffer event.
15768 camera3_notify_msg_t notify_msg = {};
15769 notify_msg.type = CAMERA3_MSG_ERROR;
15770 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15771 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15772 notify_msg.message.error.error_stream = buffer.stream;
15773
15774 orchestrateNotify(&notify_msg);
15775 }
15776
15777 camera3_capture_result_t result = {};
15778 result.frame_number = pendingBuffers->frame_number;
15779 result.num_output_buffers = streamBuffers.size();
        result.output_buffers = streamBuffers.data();
15781
15782 // Send out result with buffer errors.
15783 orchestrateResult(&result);
15784
15785 // Remove pending buffers.
15786 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15787 }
15788
15789 // Remove pending request.
15790 auto halRequest = mPendingRequestsList.begin();
15791 while (halRequest != mPendingRequestsList.end()) {
15792 if (halRequest->frame_number == failedResult->requestId) {
15793 mPendingRequestsList.erase(halRequest);
15794 break;
15795 }
15796 halRequest++;
15797 }
15798
15799 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015800}
15801
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015802
15803ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15804 mParent(parent) {}
15805
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015806void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015807{
15808 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015809
15810 if (isReprocess) {
15811 mReprocessShutters.emplace(frameNumber, Shutter());
15812 } else {
15813 mShutters.emplace(frameNumber, Shutter());
15814 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015815}
15816
15817void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15818{
15819 std::lock_guard<std::mutex> lock(mLock);
15820
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015821 std::map<uint32_t, Shutter> *shutters = nullptr;
15822
15823 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015824 auto shutter = mShutters.find(frameNumber);
15825 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015826 shutter = mReprocessShutters.find(frameNumber);
15827 if (shutter == mReprocessShutters.end()) {
15828 // Shutter was already sent.
15829 return;
15830 }
15831 shutters = &mReprocessShutters;
15832 } else {
15833 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015834 }
15835
Chien-Yu Chen0469c9b2017-09-22 13:22:19 -070015836 if (shutter->second.ready) {
15837 // If shutter is already ready, don't update timestamp again.
15838 return;
15839 }
15840
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015841 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015842 shutter->second.ready = true;
15843 shutter->second.timestamp = timestamp;
15844
    // Iterate through the shutters, sending each ready one in order, and stop at the first
    // one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015846 shutter = shutters->begin();
15847 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015848 if (!shutter->second.ready) {
15849 // If this shutter is not ready, the following shutters can't be sent.
15850 break;
15851 }
15852
15853 camera3_notify_msg_t msg = {};
15854 msg.type = CAMERA3_MSG_SHUTTER;
15855 msg.message.shutter.frame_number = shutter->first;
15856 msg.message.shutter.timestamp = shutter->second.timestamp;
15857 mParent->orchestrateNotify(&msg);
15858
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015859 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015860 }
15861}
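
// Shutters may become ready out of order (for example, an HDR+ shutter arriving late),
// but mShutters/mReprocessShutters are std::maps ordered by frame number, so delivery is
// strictly in order and stalls at the first frame that is not ready. A usage sketch;
// hwi, the frame numbers, and the timestamps are placeholders.
#if 0
    ShutterDispatcher dispatcher(hwi);
    dispatcher.expectShutter(/*frameNumber*/100, /*isReprocess*/false);
    dispatcher.expectShutter(/*frameNumber*/101, /*isReprocess*/false);
    // Frame 101 becomes ready first but is held back because frame 100 is not ready yet.
    dispatcher.markShutterReady(101, /*timestamp*/2000000000);
    // Frame 100 becomes ready: shutters for 100 and then 101 go out to the framework.
    dispatcher.markShutterReady(100, /*timestamp*/1000000000);
#endif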
15862
15863void ShutterDispatcher::clear(uint32_t frameNumber)
15864{
15865 std::lock_guard<std::mutex> lock(mLock);
15866 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015867 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015868}
15869
15870void ShutterDispatcher::clear()
15871{
15872 std::lock_guard<std::mutex> lock(mLock);
15873
15874 // Log errors for stale shutters.
15875 for (auto &shutter : mShutters) {
15876 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15877 __FUNCTION__, shutter.first, shutter.second.ready,
15878 shutter.second.timestamp);
15879 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015880
15881 // Log errors for stale reprocess shutters.
15882 for (auto &shutter : mReprocessShutters) {
15883 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15884 __FUNCTION__, shutter.first, shutter.second.ready,
15885 shutter.second.timestamp);
15886 }
15887
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015888 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015889 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015890}
15891
15892OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15893 mParent(parent) {}
15894
15895status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15896{
15897 std::lock_guard<std::mutex> lock(mLock);
15898 mStreamBuffers.clear();
15899 if (!streamList) {
15900 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15901 return -EINVAL;
15902 }
15903
15904 // Create a "frame-number -> buffer" map for each stream.
15905 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15906 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15907 }
15908
15909 return OK;
15910}
15911
15912status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15913{
15914 std::lock_guard<std::mutex> lock(mLock);
15915
15916 // Find the "frame-number -> buffer" map for the stream.
15917 auto buffers = mStreamBuffers.find(stream);
15918 if (buffers == mStreamBuffers.end()) {
15919 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15920 return -EINVAL;
15921 }
15922
15923 // Create an unready buffer for this frame number.
15924 buffers->second.emplace(frameNumber, Buffer());
15925 return OK;
15926}
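
// expectBuffer() is expected to be called for every output buffer of a request, at
// request-submission time, so that markBufferReady() later has a slot to fill in order.
// A sketch of that call pattern; request and mOutputBufferDispatcher are placeholders
// for the submitted camera3_capture_request_t and this HAL's dispatcher member.
#if 0
    for (uint32_t i = 0; i < request->num_output_buffers; i++) {
        status_t rc = mOutputBufferDispatcher.expectBuffer(request->frame_number,
                request->output_buffers[i].stream);
        if (rc != OK) {
            ALOGE("Stream %p in frame %u was not configured.",
                    request->output_buffers[i].stream, request->frame_number);
        }
    }
#endif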
15927
15928void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15929 const camera3_stream_buffer_t &buffer)
15930{
15931 std::lock_guard<std::mutex> lock(mLock);
15932
15933 // Find the frame number -> buffer map for the stream.
15934 auto buffers = mStreamBuffers.find(buffer.stream);
15935 if (buffers == mStreamBuffers.end()) {
15936 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15937 return;
15938 }
15939
    // Find the unready buffer for this frame number and mark it ready.
15941 auto pendingBuffer = buffers->second.find(frameNumber);
15942 if (pendingBuffer == buffers->second.end()) {
15943 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15944 return;
15945 }
15946
15947 pendingBuffer->second.ready = true;
15948 pendingBuffer->second.buffer = buffer;
15949
    // Iterate through the buffers, sending each ready one in order, and stop at the first
    // one that is not ready yet.
15951 pendingBuffer = buffers->second.begin();
15952 while (pendingBuffer != buffers->second.end()) {
15953 if (!pendingBuffer->second.ready) {
15954 // If this buffer is not ready, the following buffers can't be sent.
15955 break;
15956 }
15957
15958 camera3_capture_result_t result = {};
15959 result.frame_number = pendingBuffer->first;
15960 result.num_output_buffers = 1;
15961 result.output_buffers = &pendingBuffer->second.buffer;
15962
        // Send out the result with the ready buffer.
15964 mParent->orchestrateResult(&result);
15965
15966 pendingBuffer = buffers->second.erase(pendingBuffer);
15967 }
15968}
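
// Because buffers are delivered strictly in frame-number order per stream, a failed
// buffer must still be marked ready, just with an error status, or it would block every
// later frame on that stream. A sketch with placeholder variables (stream, bufferHandle,
// frameNumber) standing in for the real request bookkeeping.
#if 0
    camera3_stream_buffer_t errorBuffer = {};
    errorBuffer.stream = stream;                      // the configured output stream
    errorBuffer.buffer = bufferHandle;                // the framework-provided handle
    errorBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
    errorBuffer.acquire_fence = -1;
    errorBuffer.release_fence = -1;
    mOutputBufferDispatcher.markBufferReady(frameNumber, errorBuffer);
#endif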
15969
15970void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15971{
15972 std::lock_guard<std::mutex> lock(mLock);
15973
15974 // Log errors for stale buffers.
15975 for (auto &buffers : mStreamBuffers) {
15976 for (auto &buffer : buffers.second) {
15977 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15978 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15979 }
15980 buffers.second.clear();
15981 }
15982
15983 if (clearConfiguredStreams) {
15984 mStreamBuffers.clear();
15985 }
15986}
15987
Thierry Strudel3d639192016-09-09 11:52:26 -070015988}; //end namespace qcamera