/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
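// Illustrative relationship between the HFR constants above (an assumption
// about how batching is typically derived, not an authoritative spec): the
// batch size is roughly the capture rate divided by the preview rate, e.g.
// 240 fps HFR with a 30 fps preview gives 240 / 30 = 8 buffers per batch,
// which is consistent with MAX_HFR_BATCH_SIZE being 8.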
#define REGIONS_TUPLE_COUNT    5
// Thresholds (in seconds) used to detect missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
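// Example (illustrative only): METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to
// the number of entries in the statically sized EFFECT_MODES_MAP table defined
// below, and is the length value passed alongside a table to the enum lookup
// helpers used elsewhere in this HAL.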

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6
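// Illustrative layout implied by the indices above (a sketch, not normative):
// a face rectangle is packed as [FACE_LEFT, FACE_TOP, FACE_RIGHT, FACE_BOTTOM,
// FACE_WEIGHT] and its landmarks as [LEFT_EYE_X, LEFT_EYE_Y, RIGHT_EYE_X,
// RIGHT_EYE_Y, MOUTH_X, MOUTH_Y], i.e. TOTAL_LANDMARK_INDICES (6) values per face.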

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every Android enum value has a HAL mapping, so some Android enums are not listed.
 * The order of this list also matters: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to several different
 * Android values, the first match wins.
 */
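/* For example (an illustrative reading of the table below): CAM_AWB_D50 appears
 * in several rows (D50, DAYLIGHT, FINE_WEATHER), so a HAL value of CAM_AWB_D50
 * is reported back to the framework as ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,
 * the lowest-index entry that maps to it.
 */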
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};
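
// A minimal sketch (an assumption, for illustration only) of how the QCameraMap
// tables above are typically consumed: a linear scan that returns the paired
// value of the first matching entry, e.g.
//
//     template <typename fwkType, typename halType>
//     int lookupHal(const QCameraMap<fwkType, halType> *map, size_t len, fwkType fwk) {
//         for (size_t i = 0; i < len; i++)
//             if (map[i].fwk_name == fwk) return map[i].hal_name;
//         return NAME_NOT_FOUND;
//     }
//
// The real lookup helpers live elsewhere in this HAL; the field and function
// names in this sketch are placeholders.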

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Initialise each slot to an invalid session id; closeCamera() resets entries
// back to this sentinel when a session ends.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
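
// Typical call site (taken from openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// The timestamp is only logged when gEaselProfilingEnabled is set, so the call
// is cheap in the common case.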

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD: check whether this hardcoding is still needed, i.e. whether mctl already fills this with 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configurations are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
1269 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001270 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1271 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001272 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001273 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001274 if ((depthSamplesCount == newStream->width) &&
1275 (1 == newStream->height)) {
1276 sizeFound = true;
1277 }
1278 break;
1279 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001280 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1281 /* Verify set size against generated sizes table */
1282 for (size_t i = 0; i < count; i++) {
1283 if (((int32_t)rotatedWidth ==
1284 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1285 ((int32_t)rotatedHeight ==
1286 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1287 sizeFound = true;
1288 break;
1289 }
1290 }
1291 break;
1292 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1293 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1294 default:
1295 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1296 || newStream->stream_type == CAMERA3_STREAM_INPUT
1297 || IS_USAGE_ZSL(newStream->usage)) {
1298 if (((int32_t)rotatedWidth ==
1299 gCamCapability[mCameraId]->active_array_size.width) &&
1300 ((int32_t)rotatedHeight ==
1301 gCamCapability[mCameraId]->active_array_size.height)) {
1302 sizeFound = true;
1303 break;
1304 }
1305                /* We could potentially break here to enforce that a ZSL stream
1306                 * set from the framework is always the full active array size,
1307                 * but it is not clear from the spec whether the framework will
1308                 * always follow that. We also have logic to override to the full
1309                 * array size, so keep the logic lenient for now.
1310                 */
1311 }
1312 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1313 MAX_SIZES_CNT);
1314 for (size_t i = 0; i < count; i++) {
1315 if (((int32_t)rotatedWidth ==
1316 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1317 ((int32_t)rotatedHeight ==
1318 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1319 sizeFound = true;
1320 break;
1321 }
1322 }
1323 break;
1324 } /* End of switch(newStream->format) */
1325
1326 /* We error out even if a single stream has unsupported size set */
1327 if (!sizeFound) {
1328 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1329 rotatedWidth, rotatedHeight, newStream->format,
1330 gCamCapability[mCameraId]->active_array_size.width,
1331 gCamCapability[mCameraId]->active_array_size.height);
1332 rc = -EINVAL;
1333 break;
1334 }
1335 } /* End of for each stream */
1336 return rc;
1337}
1338
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001339/*===========================================================================
1340 * FUNCTION : validateUsageFlags
1341 *
1342 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1343 *
1344 * PARAMETERS :
1345 * @stream_list : streams to be configured
1346 *
1347 * RETURN :
1348 * NO_ERROR if the usage flags are supported
1349 * error code if usage flags are not supported
1350 *
1351 *==========================================================================*/
1352int QCamera3HardwareInterface::validateUsageFlags(
1353 const camera3_stream_configuration_t* streamList)
1354{
1355 for (size_t j = 0; j < streamList->num_streams; j++) {
1356 const camera3_stream_t *newStream = streamList->streams[j];
1357
1358 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1359 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1360 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1361 continue;
1362 }
1363
Jason Leec4cf5032017-05-24 18:31:41 -07001364 // Here we only care whether it's EIS3 or not
1365 char is_type_value[PROPERTY_VALUE_MAX];
1366 property_get("persist.camera.is_type", is_type_value, "4");
1367 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1368 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1369 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1370 isType = IS_TYPE_NONE;
1371
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001372 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1373 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1374 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1375 bool forcePreviewUBWC = true;
1376 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1377 forcePreviewUBWC = false;
1378 }
1379 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001382 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001383 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385
1386 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1387 // So color spaces will always match.
1388
1389 // Check whether underlying formats of shared streams match.
1390 if (isVideo && isPreview && videoFormat != previewFormat) {
1391 LOGE("Combined video and preview usage flag is not supported");
1392 return -EINVAL;
1393 }
1394 if (isPreview && isZSL && previewFormat != zslFormat) {
1395 LOGE("Combined preview and zsl usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isVideo && isZSL && videoFormat != zslFormat) {
1399 LOGE("Combined video and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 }
1403 return NO_ERROR;
1404}
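/*
 * Illustrative sketch only. The gralloc usage bits shown are assumptions for
 * illustration, not the exact masks behind IS_USAGE_PREVIEW()/IS_USAGE_VIDEO():
 * a single IMPLEMENTATION_DEFINED output stream tagged with both preview and
 * video usage passes validateUsageFlags() only when both usages resolve to the
 * same default cam_format_t, e.g.:
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_OUTPUT;
 *   s.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   s.usage       = GRALLOC_USAGE_HW_VIDEO_ENCODER   // video path
 *                 | GRALLOC_USAGE_HW_TEXTURE;        // preview path (assumed)
 */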
1405
1406/*===========================================================================
1407 * FUNCTION : validateUsageFlagsForEis
1408 *
1409 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1410 *
1411 * PARAMETERS :
1412 * @stream_list : streams to be configured
1413 *
1414 * RETURN :
1415 * NO_ERROR if the usage flags are supported
1416 * error code if usage flags are not supported
1417 *
1418 *==========================================================================*/
1419int QCamera3HardwareInterface::validateUsageFlagsForEis(
1420 const camera3_stream_configuration_t* streamList)
1421{
1422 for (size_t j = 0; j < streamList->num_streams; j++) {
1423 const camera3_stream_t *newStream = streamList->streams[j];
1424
1425 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1426 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1427
1428        // Because EIS is "hard-coded" for certain use cases, and the current
1429        // implementation doesn't support shared preview and video on the same
1430 // stream, return failure if EIS is forced on.
1431 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1432 LOGE("Combined video and preview usage flag is not supported due to EIS");
1433 return -EINVAL;
1434 }
1435 }
1436 return NO_ERROR;
1437}
1438
Thierry Strudel3d639192016-09-09 11:52:26 -07001439/*==============================================================================
1440 * FUNCTION : isSupportChannelNeeded
1441 *
1442 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1443 *
1444 * PARAMETERS :
1445 * @stream_list : streams to be configured
1446 * @stream_config_info : the config info for streams to be configured
1447 *
1448 * RETURN     : Boolean true/false decision
1449 *
1450 *==========================================================================*/
1451bool QCamera3HardwareInterface::isSupportChannelNeeded(
1452 camera3_stream_configuration_t *streamList,
1453 cam_stream_size_info_t stream_config_info)
1454{
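    // A support (dummy) channel is needed when either (a) no configured stream
    // exercises the post-processing pipeline, or (b) only RAW and/or BLOB (JPEG)
    // streams are present.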
1455 uint32_t i;
1456 bool pprocRequested = false;
1457 /* Check for conditions where PProc pipeline does not have any streams*/
1458 for (i = 0; i < stream_config_info.num_streams; i++) {
1459 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1460 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1461 pprocRequested = true;
1462 break;
1463 }
1464 }
1465
1466 if (pprocRequested == false )
1467 return true;
1468
1469 /* Dummy stream needed if only raw or jpeg streams present */
1470 for (i = 0; i < streamList->num_streams; i++) {
1471 switch(streamList->streams[i]->format) {
1472 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1473 case HAL_PIXEL_FORMAT_RAW10:
1474 case HAL_PIXEL_FORMAT_RAW16:
1475 case HAL_PIXEL_FORMAT_BLOB:
1476 break;
1477 default:
1478 return false;
1479 }
1480 }
1481 return true;
1482}
1483
1484/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 *
1489 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001490 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001491 *
1492 * RETURN : int32_t type of status
1493 * NO_ERROR -- success
1494 *              non-zero failure code
1495 *
1496 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001497int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001498{
1499 int32_t rc = NO_ERROR;
1500
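    // The sensor mode is selected based on the largest requested stream, so first
    // publish the maximum width/height across all configured streams via
    // CAM_INTF_PARM_MAX_DIMENSION, then read back the resulting
    // CAM_INTF_PARM_SENSOR_MODE_INFO.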
1501 cam_dimension_t max_dim = {0, 0};
1502 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1503 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1504 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1505 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1506 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1507 }
1508
1509 clear_metadata_buffer(mParameters);
1510
1511 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1512 max_dim);
1513 if (rc != NO_ERROR) {
1514 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1515 return rc;
1516 }
1517
1518 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1519 if (rc != NO_ERROR) {
1520 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1521 return rc;
1522 }
1523
1524 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001525 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001526
1527 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1528 mParameters);
1529 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001531 return rc;
1532 }
1533
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001535 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1536 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1537 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1538 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1539 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001540
1541 return rc;
1542}
1543
1544/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001545 * FUNCTION : getCurrentSensorModeInfo
1546 *
1547 * DESCRIPTION: Get sensor mode information that is currently selected.
1548 *
1549 * PARAMETERS :
1550 * @sensorModeInfo : sensor mode information (output)
1551 *
1552 * RETURN : int32_t type of status
1553 * NO_ERROR -- success
1554 *              non-zero failure code
1555 *
1556 *==========================================================================*/
1557int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1558{
1559 int32_t rc = NO_ERROR;
1560
1561 clear_metadata_buffer(mParameters);
1562 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1563
1564 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1565 mParameters);
1566 if (rc != NO_ERROR) {
1567        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1568 return rc;
1569 }
1570
1571 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1572 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1573 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1574 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1575 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1576 sensorModeInfo.num_raw_bits);
1577
1578 return rc;
1579}
1580
1581/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001582 * FUNCTION : addToPPFeatureMask
1583 *
1584 * DESCRIPTION: add additional features to pp feature mask based on
1585 *              stream type and use case
1586 *
1587 * PARAMETERS :
1588 * @stream_format : stream type for feature mask
1589 * @stream_idx : stream idx within postprocess_mask list to change
1590 *
1591 * RETURN     : None
1592 *
1593 *==========================================================================*/
1594void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1595 uint32_t stream_idx)
1596{
1597 char feature_mask_value[PROPERTY_VALUE_MAX];
1598 cam_feature_mask_t feature_mask;
1599 int args_converted;
1600 int property_len;
1601
1602 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001603#ifdef _LE_CAMERA_
1604 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1605 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1606 property_len = property_get("persist.camera.hal3.feature",
1607 feature_mask_value, swtnr_feature_mask_value);
1608#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001609 property_len = property_get("persist.camera.hal3.feature",
1610 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001611#endif
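    /* The feature mask property accepts either a hex value (leading "0x") or a
     * plain decimal string; both forms are parsed below. */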
Thierry Strudel3d639192016-09-09 11:52:26 -07001612 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1613 (feature_mask_value[1] == 'x')) {
1614 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1615 } else {
1616 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1617 }
1618 if (1 != args_converted) {
1619 feature_mask = 0;
1620 LOGE("Wrong feature mask %s", feature_mask_value);
1621 return;
1622 }
1623
1624 switch (stream_format) {
1625 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1626 /* Add LLVD to pp feature mask only if video hint is enabled */
1627 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1628 mStreamConfigInfo.postprocess_mask[stream_idx]
1629 |= CAM_QTI_FEATURE_SW_TNR;
1630 LOGH("Added SW TNR to pp feature mask");
1631 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QCOM_FEATURE_LLVD;
1634 LOGH("Added LLVD SeeMore to pp feature mask");
1635 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001636 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1637 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1638 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1639 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001640 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1643 CAM_QTI_FEATURE_BINNING_CORRECTION;
1644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001645 break;
1646 }
1647 default:
1648 break;
1649 }
1650 LOGD("PP feature mask %llx",
1651 mStreamConfigInfo.postprocess_mask[stream_idx]);
1652}
1653
1654/*==============================================================================
1655 * FUNCTION : updateFpsInPreviewBuffer
1656 *
1657 * DESCRIPTION: update FPS information in preview buffer.
1658 *
1659 * PARAMETERS :
1660 * @metadata : pointer to metadata buffer
1661 * @frame_number: frame_number to look for in pending buffer list
1662 *
1663 * RETURN : None
1664 *
1665 *==========================================================================*/
1666void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1667 uint32_t frame_number)
1668{
1669 // Mark all pending buffers for this particular request
1670 // with corresponding framerate information
1671 for (List<PendingBuffersInRequest>::iterator req =
1672 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1673 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1674 for(List<PendingBufferInfo>::iterator j =
1675 req->mPendingBufferList.begin();
1676 j != req->mPendingBufferList.end(); j++) {
1677 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1678 if ((req->frame_number == frame_number) &&
1679 (channel->getStreamTypeMask() &
1680 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1681 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1682 CAM_INTF_PARM_FPS_RANGE, metadata) {
1683 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1684 struct private_handle_t *priv_handle =
1685 (struct private_handle_t *)(*(j->buffer));
1686 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1687 }
1688 }
1689 }
1690 }
1691}
1692
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001693/*==============================================================================
1694 * FUNCTION : updateTimeStampInPendingBuffers
1695 *
1696 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1697 * of a frame number
1698 *
1699 * PARAMETERS :
1700 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1701 * @timestamp : timestamp to be set
1702 *
1703 * RETURN : None
1704 *
1705 *==========================================================================*/
1706void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1707 uint32_t frameNumber, nsecs_t timestamp)
1708{
1709 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1710 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1711 if (req->frame_number != frameNumber)
1712 continue;
1713
1714 for (auto k = req->mPendingBufferList.begin();
1715 k != req->mPendingBufferList.end(); k++ ) {
1716 struct private_handle_t *priv_handle =
1717 (struct private_handle_t *) (*(k->buffer));
1718 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1719 }
1720 }
1721 return;
1722}
1723
Thierry Strudel3d639192016-09-09 11:52:26 -07001724/*===========================================================================
1725 * FUNCTION : configureStreams
1726 *
1727 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1728 * and output streams.
1729 *
1730 * PARAMETERS :
1731 * @stream_list : streams to be configured
1732 *
1733 * RETURN :
1734 *
1735 *==========================================================================*/
1736int QCamera3HardwareInterface::configureStreams(
1737 camera3_stream_configuration_t *streamList)
1738{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001739 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740 int rc = 0;
1741
1742 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001743 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001744 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001745 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001746
1747 return rc;
1748}
1749
1750/*===========================================================================
1751 * FUNCTION : configureStreamsPerfLocked
1752 *
1753 * DESCRIPTION: configureStreams while perfLock is held.
1754 *
1755 * PARAMETERS :
1756 * @stream_list : streams to be configured
1757 *
1758 * RETURN : int32_t type of status
1759 * NO_ERROR -- success
1760 *              non-zero failure code
1761 *==========================================================================*/
1762int QCamera3HardwareInterface::configureStreamsPerfLocked(
1763 camera3_stream_configuration_t *streamList)
1764{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001765 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001766 int rc = 0;
1767
1768 // Sanity check stream_list
1769 if (streamList == NULL) {
1770 LOGE("NULL stream configuration");
1771 return BAD_VALUE;
1772 }
1773 if (streamList->streams == NULL) {
1774 LOGE("NULL stream list");
1775 return BAD_VALUE;
1776 }
1777
1778 if (streamList->num_streams < 1) {
1779 LOGE("Bad number of streams requested: %d",
1780 streamList->num_streams);
1781 return BAD_VALUE;
1782 }
1783
1784 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1785 LOGE("Maximum number of streams %d exceeded: %d",
1786 MAX_NUM_STREAMS, streamList->num_streams);
1787 return BAD_VALUE;
1788 }
1789
Jason Leec4cf5032017-05-24 18:31:41 -07001790 mOpMode = streamList->operation_mode;
1791 LOGD("mOpMode: %d", mOpMode);
1792
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001793 rc = validateUsageFlags(streamList);
1794 if (rc != NO_ERROR) {
1795 return rc;
1796 }
1797
Thierry Strudel3d639192016-09-09 11:52:26 -07001798    /* First invalidate all the streams in mStreamInfo;
1799     * if they appear again, they will be validated */
1800 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1801 it != mStreamInfo.end(); it++) {
1802 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1803 if (channel) {
1804 channel->stop();
1805 }
1806 (*it)->status = INVALID;
1807 }
1808
1809 if (mRawDumpChannel) {
1810 mRawDumpChannel->stop();
1811 delete mRawDumpChannel;
1812 mRawDumpChannel = NULL;
1813 }
1814
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001815 if (mHdrPlusRawSrcChannel) {
1816 mHdrPlusRawSrcChannel->stop();
1817 delete mHdrPlusRawSrcChannel;
1818 mHdrPlusRawSrcChannel = NULL;
1819 }
1820
Thierry Strudel3d639192016-09-09 11:52:26 -07001821 if (mSupportChannel)
1822 mSupportChannel->stop();
1823
1824 if (mAnalysisChannel) {
1825 mAnalysisChannel->stop();
1826 }
1827 if (mMetadataChannel) {
1828        /* If mStreamInfo is not empty, there is a metadata stream */
1829 mMetadataChannel->stop();
1830 }
1831 if (mChannelHandle) {
1832 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001833 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001834 LOGD("stopping channel %d", mChannelHandle);
1835 }
1836
1837 pthread_mutex_lock(&mMutex);
1838
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001839 mPictureChannel = NULL;
1840
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 // Check state
1842 switch (mState) {
1843 case INITIALIZED:
1844 case CONFIGURED:
1845 case STARTED:
1846 /* valid state */
1847 break;
1848 default:
1849 LOGE("Invalid state %d", mState);
1850 pthread_mutex_unlock(&mMutex);
1851 return -ENODEV;
1852 }
1853
1854 /* Check whether we have video stream */
1855 m_bIs4KVideo = false;
1856 m_bIsVideo = false;
1857 m_bEisSupportedSize = false;
1858 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001859 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001861 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001862 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001863 uint32_t videoWidth = 0U;
1864 uint32_t videoHeight = 0U;
1865 size_t rawStreamCnt = 0;
1866 size_t stallStreamCnt = 0;
1867 size_t processedStreamCnt = 0;
1868 // Number of streams on ISP encoder path
1869 size_t numStreamsOnEncoder = 0;
1870 size_t numYuv888OnEncoder = 0;
1871 bool bYuv888OverrideJpeg = false;
1872 cam_dimension_t largeYuv888Size = {0, 0};
1873 cam_dimension_t maxViewfinderSize = {0, 0};
1874 bool bJpegExceeds4K = false;
1875 bool bJpegOnEncoder = false;
1876 bool bUseCommonFeatureMask = false;
1877 cam_feature_mask_t commonFeatureMask = 0;
1878 bool bSmallJpegSize = false;
1879 uint32_t width_ratio;
1880 uint32_t height_ratio;
1881 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1882 camera3_stream_t *inputStream = NULL;
1883 bool isJpeg = false;
1884 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001885 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001886 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001887
1888 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1889
1890 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 uint8_t eis_prop_set;
1892 uint32_t maxEisWidth = 0;
1893 uint32_t maxEisHeight = 0;
1894
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001895 // Initialize all instant AEC related variables
1896 mInstantAEC = false;
1897 mResetInstantAEC = false;
1898 mInstantAECSettledFrameNumber = 0;
1899 mAecSkipDisplayFrameBound = 0;
1900 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001901 mCurrFeatureState = 0;
1902 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001903
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1905
1906 size_t count = IS_TYPE_MAX;
1907 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1908 for (size_t i = 0; i < count; i++) {
1909 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001910 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1911 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001912 break;
1913 }
1914 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001915
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001916 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001917 maxEisWidth = MAX_EIS_WIDTH;
1918 maxEisHeight = MAX_EIS_HEIGHT;
1919 }
1920
1921 /* EIS setprop control */
1922 char eis_prop[PROPERTY_VALUE_MAX];
1923 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001924 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 eis_prop_set = (uint8_t)atoi(eis_prop);
1926
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001927 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001928 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1929
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001930 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1931 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001932
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 /* stream configurations */
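    /* First pass over the requested streams: classify each stream (stall, raw,
     * processed), detect ZSL / 4K video / depth usage, and count how many
     * streams land on the ISP encoder path. */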
1934 for (size_t i = 0; i < streamList->num_streams; i++) {
1935 camera3_stream_t *newStream = streamList->streams[i];
1936 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1937 "height = %d, rotation = %d, usage = 0x%x",
1938 i, newStream->stream_type, newStream->format,
1939 newStream->width, newStream->height, newStream->rotation,
1940 newStream->usage);
1941 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1942 newStream->stream_type == CAMERA3_STREAM_INPUT){
1943 isZsl = true;
1944 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001945 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1946 IS_USAGE_PREVIEW(newStream->usage)) {
1947 isPreview = true;
1948 }
1949
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1951 inputStream = newStream;
1952 }
1953
Emilian Peev7650c122017-01-19 08:24:33 -08001954 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1955 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001956 isJpeg = true;
1957 jpegSize.width = newStream->width;
1958 jpegSize.height = newStream->height;
1959 if (newStream->width > VIDEO_4K_WIDTH ||
1960 newStream->height > VIDEO_4K_HEIGHT)
1961 bJpegExceeds4K = true;
1962 }
1963
1964 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1965 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1966 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001967 // In HAL3 we can have multiple different video streams.
1968 // The variables video width and height are used below as
1969 // dimensions of the biggest of them
1970 if (videoWidth < newStream->width ||
1971 videoHeight < newStream->height) {
1972 videoWidth = newStream->width;
1973 videoHeight = newStream->height;
1974 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001975 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1976 (VIDEO_4K_HEIGHT <= newStream->height)) {
1977 m_bIs4KVideo = true;
1978 }
1979 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1980 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001981
Thierry Strudel3d639192016-09-09 11:52:26 -07001982 }
1983 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1984 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1985 switch (newStream->format) {
1986 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001987 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1988 depthPresent = true;
1989 break;
1990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001991 stallStreamCnt++;
1992 if (isOnEncoder(maxViewfinderSize, newStream->width,
1993 newStream->height)) {
1994 numStreamsOnEncoder++;
1995 bJpegOnEncoder = true;
1996 }
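                // bSmallJpegSize marks a blob that would need more downscaling than
                // the ISP supports. Illustrative numbers only: with a 4000-pixel wide
                // active array and max_downscale_factor of 8, any JPEG narrower than
                // 500 pixels sets the flag.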
1997 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1998 newStream->width);
1999 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2000                newStream->height);
2001 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2002 "FATAL: max_downscale_factor cannot be zero and so assert");
2003 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2004 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2005 LOGH("Setting small jpeg size flag to true");
2006 bSmallJpegSize = true;
2007 }
2008 break;
2009 case HAL_PIXEL_FORMAT_RAW10:
2010 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2011 case HAL_PIXEL_FORMAT_RAW16:
2012 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002013 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2014 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2015 pdStatCount++;
2016 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002017 break;
2018 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2019 processedStreamCnt++;
2020 if (isOnEncoder(maxViewfinderSize, newStream->width,
2021 newStream->height)) {
2022 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2023 !IS_USAGE_ZSL(newStream->usage)) {
2024 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2025 }
2026 numStreamsOnEncoder++;
2027 }
2028 break;
2029 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2030 processedStreamCnt++;
2031 if (isOnEncoder(maxViewfinderSize, newStream->width,
2032 newStream->height)) {
2033 // If Yuv888 size is not greater than 4K, set feature mask
2034 // to SUPERSET so that it support concurrent request on
2035 // YUV and JPEG.
2036 if (newStream->width <= VIDEO_4K_WIDTH &&
2037 newStream->height <= VIDEO_4K_HEIGHT) {
2038 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2039 }
2040 numStreamsOnEncoder++;
2041 numYuv888OnEncoder++;
2042 largeYuv888Size.width = newStream->width;
2043 largeYuv888Size.height = newStream->height;
2044 }
2045 break;
2046 default:
2047 processedStreamCnt++;
2048 if (isOnEncoder(maxViewfinderSize, newStream->width,
2049 newStream->height)) {
2050 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2051 numStreamsOnEncoder++;
2052 }
2053 break;
2054 }
2055
2056 }
2057 }
2058
2059 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2060 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2061 !m_bIsVideo) {
2062 m_bEisEnable = false;
2063 }
2064
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002065 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2066 pthread_mutex_unlock(&mMutex);
2067 return -EINVAL;
2068 }
2069
Thierry Strudel54dc9782017-02-15 12:12:10 -08002070 uint8_t forceEnableTnr = 0;
2071 char tnr_prop[PROPERTY_VALUE_MAX];
2072 memset(tnr_prop, 0, sizeof(tnr_prop));
2073 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2074 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2075
Thierry Strudel3d639192016-09-09 11:52:26 -07002076 /* Logic to enable/disable TNR based on specific config size/etc.*/
2077 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2079 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002080 else if (forceEnableTnr)
2081 m_bTnrEnabled = true;
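    // Net effect: TNR is enabled for non-HFR sessions that include a video stream
    // when either TNR setprop is set; debug.camera.tnr.forceenable turns it on
    // regardless of the stream configuration.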
Thierry Strudel3d639192016-09-09 11:52:26 -07002082
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002083 char videoHdrProp[PROPERTY_VALUE_MAX];
2084 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2085 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2086 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2087
2088 if (hdr_mode_prop == 1 && m_bIsVideo &&
2089 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2090 m_bVideoHdrEnabled = true;
2091 else
2092 m_bVideoHdrEnabled = false;
2093
2094
Thierry Strudel3d639192016-09-09 11:52:26 -07002095 /* Check if num_streams is sane */
2096 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2097 rawStreamCnt > MAX_RAW_STREAMS ||
2098 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2099        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2100 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2101 pthread_mutex_unlock(&mMutex);
2102 return -EINVAL;
2103 }
2104 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002105 if (isZsl && m_bIs4KVideo) {
2106 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002107 pthread_mutex_unlock(&mMutex);
2108 return -EINVAL;
2109 }
2110 /* Check if stream sizes are sane */
2111 if (numStreamsOnEncoder > 2) {
2112 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 } else if (1 < numStreamsOnEncoder){
2116 bUseCommonFeatureMask = true;
2117 LOGH("Multiple streams above max viewfinder size, common mask needed");
2118 }
2119
2120 /* Check if BLOB size is greater than 4k in 4k recording case */
2121 if (m_bIs4KVideo && bJpegExceeds4K) {
2122 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2123 pthread_mutex_unlock(&mMutex);
2124 return -EINVAL;
2125 }
2126
Emilian Peev7650c122017-01-19 08:24:33 -08002127 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2128 depthPresent) {
2129 LOGE("HAL doesn't support depth streams in HFR mode!");
2130 pthread_mutex_unlock(&mMutex);
2131 return -EINVAL;
2132 }
2133
Thierry Strudel3d639192016-09-09 11:52:26 -07002134 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2135 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2136 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2137 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2138 // configurations:
2139 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2140 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2141 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2142 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2143 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2144 __func__);
2145 pthread_mutex_unlock(&mMutex);
2146 return -EINVAL;
2147 }
2148
2149 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2150    // the YUV stream's size is greater than the JPEG size, set common
2151 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2152 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2153 jpegSize.width, jpegSize.height) &&
2154 largeYuv888Size.width > jpegSize.width &&
2155 largeYuv888Size.height > jpegSize.height) {
2156 bYuv888OverrideJpeg = true;
2157 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2158 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2159 }
2160
2161 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2162 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2163 commonFeatureMask);
2164 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2165 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2166
2167 rc = validateStreamDimensions(streamList);
2168 if (rc == NO_ERROR) {
2169 rc = validateStreamRotations(streamList);
2170 }
2171 if (rc != NO_ERROR) {
2172 LOGE("Invalid stream configuration requested!");
2173 pthread_mutex_unlock(&mMutex);
2174 return rc;
2175 }
2176
Emilian Peev0f3c3162017-03-15 12:57:46 +00002177 if (1 < pdStatCount) {
2178 LOGE("HAL doesn't support multiple PD streams");
2179 pthread_mutex_unlock(&mMutex);
2180 return -EINVAL;
2181 }
2182
2183 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2184 (1 == pdStatCount)) {
2185 LOGE("HAL doesn't support PD streams in HFR mode!");
2186 pthread_mutex_unlock(&mMutex);
2187 return -EINVAL;
2188 }
2189
Thierry Strudel3d639192016-09-09 11:52:26 -07002190 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2191 for (size_t i = 0; i < streamList->num_streams; i++) {
2192 camera3_stream_t *newStream = streamList->streams[i];
2193 LOGH("newStream type = %d, stream format = %d "
2194 "stream size : %d x %d, stream rotation = %d",
2195 newStream->stream_type, newStream->format,
2196 newStream->width, newStream->height, newStream->rotation);
2197        // if the stream is already in mStreamInfo, validate it
2198 bool stream_exists = false;
2199 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2200 it != mStreamInfo.end(); it++) {
2201 if ((*it)->stream == newStream) {
2202 QCamera3ProcessingChannel *channel =
2203 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2204 stream_exists = true;
2205 if (channel)
2206 delete channel;
2207 (*it)->status = VALID;
2208 (*it)->stream->priv = NULL;
2209 (*it)->channel = NULL;
2210 }
2211 }
2212 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2213 //new stream
2214 stream_info_t* stream_info;
2215 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2216 if (!stream_info) {
2217 LOGE("Could not allocate stream info");
2218 rc = -ENOMEM;
2219 pthread_mutex_unlock(&mMutex);
2220 return rc;
2221 }
2222 stream_info->stream = newStream;
2223 stream_info->status = VALID;
2224 stream_info->channel = NULL;
2225 mStreamInfo.push_back(stream_info);
2226 }
2227 /* Covers Opaque ZSL and API1 F/W ZSL */
2228 if (IS_USAGE_ZSL(newStream->usage)
2229 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2230 if (zslStream != NULL) {
2231 LOGE("Multiple input/reprocess streams requested!");
2232 pthread_mutex_unlock(&mMutex);
2233 return BAD_VALUE;
2234 }
2235 zslStream = newStream;
2236 }
2237 /* Covers YUV reprocess */
2238 if (inputStream != NULL) {
2239 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2240 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2241 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2242 && inputStream->width == newStream->width
2243 && inputStream->height == newStream->height) {
2244 if (zslStream != NULL) {
2245                /* This scenario indicates multiple YUV streams with the same
2246                 * size as the input stream have been requested. Since the zsl
2247                 * stream handle is used solely to override the size of streams
2248                 * that share h/w streams, we just make a guess here as to which
2249                 * of the streams is the ZSL stream. This will be refactored once
2250                 * we have generic logic for streams sharing encoder output.
2251                 */
2252 LOGH("Warning, Multiple ip/reprocess streams requested!");
2253 }
2254 zslStream = newStream;
2255 }
2256 }
2257 }
2258
2259 /* If a zsl stream is set, we know that we have configured at least one input or
2260 bidirectional stream */
2261 if (NULL != zslStream) {
2262 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2263 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2264 mInputStreamInfo.format = zslStream->format;
2265 mInputStreamInfo.usage = zslStream->usage;
2266 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2267 mInputStreamInfo.dim.width,
2268 mInputStreamInfo.dim.height,
2269 mInputStreamInfo.format, mInputStreamInfo.usage);
2270 }
2271
2272 cleanAndSortStreamInfo();
2273 if (mMetadataChannel) {
2274 delete mMetadataChannel;
2275 mMetadataChannel = NULL;
2276 }
2277 if (mSupportChannel) {
2278 delete mSupportChannel;
2279 mSupportChannel = NULL;
2280 }
2281
2282 if (mAnalysisChannel) {
2283 delete mAnalysisChannel;
2284 mAnalysisChannel = NULL;
2285 }
2286
2287 if (mDummyBatchChannel) {
2288 delete mDummyBatchChannel;
2289 mDummyBatchChannel = NULL;
2290 }
2291
Emilian Peev7650c122017-01-19 08:24:33 -08002292 if (mDepthChannel) {
2293 mDepthChannel = NULL;
2294 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002295 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002296
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002297 mShutterDispatcher.clear();
2298 mOutputBufferDispatcher.clear();
2299
Thierry Strudel2896d122017-02-23 19:18:03 -08002300 char is_type_value[PROPERTY_VALUE_MAX];
2301 property_get("persist.camera.is_type", is_type_value, "4");
2302 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2303
Binhao Line406f062017-05-03 14:39:44 -07002304 char property_value[PROPERTY_VALUE_MAX];
2305 property_get("persist.camera.gzoom.at", property_value, "0");
2306 int goog_zoom_at = atoi(property_value);
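    // persist.camera.gzoom.at is treated as a bitmask: bit 0 enables Google zoom
    // on the video stream, bit 1 on the preview stream (back camera only, per the
    // checks below).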
Jason Leec4cf5032017-05-24 18:31:41 -07002307 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2308 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2309 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2310 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002311
2312 property_get("persist.camera.gzoom.4k", property_value, "0");
2313 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2314
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 //Create metadata channel and initialize it
2316 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2317 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2318 gCamCapability[mCameraId]->color_arrangement);
2319 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2320 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002321 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002322 if (mMetadataChannel == NULL) {
2323 LOGE("failed to allocate metadata channel");
2324 rc = -ENOMEM;
2325 pthread_mutex_unlock(&mMutex);
2326 return rc;
2327 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002328 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2330 if (rc < 0) {
2331 LOGE("metadata channel initialization failed");
2332 delete mMetadataChannel;
2333 mMetadataChannel = NULL;
2334 pthread_mutex_unlock(&mMutex);
2335 return rc;
2336 }
2337
Thierry Strudel2896d122017-02-23 19:18:03 -08002338 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002339 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002340 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002341    // Keep track of preview/video stream indices.
2342    // There could be more than one preview stream, but only one video stream.
2343 int32_t video_stream_idx = -1;
2344 int32_t preview_stream_idx[streamList->num_streams];
2345 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002346 bool previewTnr[streamList->num_streams];
2347 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2348 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2349 // Loop through once to determine preview TNR conditions before creating channels.
2350 for (size_t i = 0; i < streamList->num_streams; i++) {
2351 camera3_stream_t *newStream = streamList->streams[i];
2352 uint32_t stream_usage = newStream->usage;
2353 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2354 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2355 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2356 video_stream_idx = (int32_t)i;
2357 else
2358 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2359 }
2360 }
2361 // By default, preview stream TNR is disabled.
2362 // Enable TNR to the preview stream if all conditions below are satisfied:
2363 // 1. preview resolution == video resolution.
2364 // 2. video stream TNR is enabled.
2365 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2366 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2367 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2368 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2369 if (m_bTnrEnabled && m_bTnrVideo &&
2370 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2371 video_stream->width == preview_stream->width &&
2372 video_stream->height == preview_stream->height) {
2373 previewTnr[preview_stream_idx[i]] = true;
2374 }
2375 }
2376
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2378 /* Allocate channel objects for the requested streams */
2379 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002380
Thierry Strudel3d639192016-09-09 11:52:26 -07002381 camera3_stream_t *newStream = streamList->streams[i];
2382 uint32_t stream_usage = newStream->usage;
2383 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2385 struct camera_info *p_info = NULL;
2386 pthread_mutex_lock(&gCamLock);
2387 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2388 pthread_mutex_unlock(&gCamLock);
2389 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2390 || IS_USAGE_ZSL(newStream->usage)) &&
2391 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2395 if (bUseCommonFeatureMask)
2396 zsl_ppmask = commonFeatureMask;
2397 else
2398 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002400 if (numStreamsOnEncoder > 0)
2401 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2402 else
2403 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002405 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002406 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002407 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002408 LOGH("Input stream configured, reprocess config");
2409 } else {
2410            // for non-ZSL streams, find out the format
2411 switch (newStream->format) {
2412 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2413 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2416 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2417 /* add additional features to pp feature mask */
2418 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2419 mStreamConfigInfo.num_streams);
2420
2421 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2422 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2423 CAM_STREAM_TYPE_VIDEO;
2424 if (m_bTnrEnabled && m_bTnrVideo) {
2425 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2426 CAM_QCOM_FEATURE_CPP_TNR;
2427 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2429 ~CAM_QCOM_FEATURE_CDS;
2430 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002431 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2433 CAM_QTI_FEATURE_PPEISCORE;
2434 }
Binhao Line406f062017-05-03 14:39:44 -07002435 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2437 CAM_QCOM_FEATURE_GOOG_ZOOM;
2438 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002439 } else {
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002442 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002443 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2444 CAM_QCOM_FEATURE_CPP_TNR;
2445 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2447 ~CAM_QCOM_FEATURE_CDS;
2448 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002449 if(!m_bSwTnrPreview) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QTI_FEATURE_SW_TNR;
2452 }
Binhao Line406f062017-05-03 14:39:44 -07002453 if (is_goog_zoom_preview_enabled) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QCOM_FEATURE_GOOG_ZOOM;
2456 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 padding_info.width_padding = mSurfaceStridePadding;
2458 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002459 previewSize.width = (int32_t)newStream->width;
2460 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 }
2462 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2463 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2465 newStream->height;
2466 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2467 newStream->width;
2468 }
2469 }
2470 break;
2471 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002472 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2474 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2475 if (bUseCommonFeatureMask)
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2477 commonFeatureMask;
2478 else
2479 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2480 CAM_QCOM_FEATURE_NONE;
2481 } else {
2482 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2483 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2484 }
2485 break;
2486 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002487 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002488 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2489 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2490 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2492 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002493 /* Remove rotation if it is not supported
2494 for 4K LiveVideo snapshot case (online processing) */
2495 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2496 CAM_QCOM_FEATURE_ROTATION)) {
2497 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2498 &= ~CAM_QCOM_FEATURE_ROTATION;
2499 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 } else {
2501 if (bUseCommonFeatureMask &&
2502 isOnEncoder(maxViewfinderSize, newStream->width,
2503 newStream->height)) {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2505 } else {
2506 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2507 }
2508 }
2509 if (isZsl) {
2510 if (zslStream) {
2511 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2512 (int32_t)zslStream->width;
2513 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2514 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2516 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002517 } else {
2518 LOGE("Error, No ZSL stream identified");
2519 pthread_mutex_unlock(&mMutex);
2520 return -EINVAL;
2521 }
2522 } else if (m_bIs4KVideo) {
2523 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2525 } else if (bYuv888OverrideJpeg) {
2526 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2527 (int32_t)largeYuv888Size.width;
2528 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2529 (int32_t)largeYuv888Size.height;
2530 }
2531 break;
2532 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2533 case HAL_PIXEL_FORMAT_RAW16:
2534 case HAL_PIXEL_FORMAT_RAW10:
2535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2537 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002538 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2539 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2540 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2542 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2543 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2544 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2545 gCamCapability[mCameraId]->dt[mPDIndex];
2546 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2547 gCamCapability[mCameraId]->vc[mPDIndex];
2548 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002549 break;
2550 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002551 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002552 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2553 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2554 break;
2555 }
2556 }
2557
2558 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2559 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2560 gCamCapability[mCameraId]->color_arrangement);
2561
2562 if (newStream->priv == NULL) {
2563 //New stream, construct channel
2564 switch (newStream->stream_type) {
2565 case CAMERA3_STREAM_INPUT:
2566 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2567 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2568 break;
2569 case CAMERA3_STREAM_BIDIRECTIONAL:
2570 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2571 GRALLOC_USAGE_HW_CAMERA_WRITE;
2572 break;
2573 case CAMERA3_STREAM_OUTPUT:
2574 /* For video encoding stream, set read/write rarely
2575 * flag so that they may be set to un-cached */
2576 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2577 newStream->usage |=
2578 (GRALLOC_USAGE_SW_READ_RARELY |
2579 GRALLOC_USAGE_SW_WRITE_RARELY |
2580 GRALLOC_USAGE_HW_CAMERA_WRITE);
2581 else if (IS_USAGE_ZSL(newStream->usage))
2582 {
2583 LOGD("ZSL usage flag skipping");
2584 }
2585 else if (newStream == zslStream
2586 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2587 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2588 } else
2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2590 break;
2591 default:
2592 LOGE("Invalid stream_type %d", newStream->stream_type);
2593 break;
2594 }
2595
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002596 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002597 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2598 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2599 QCamera3ProcessingChannel *channel = NULL;
2600 switch (newStream->format) {
2601 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2602 if ((newStream->usage &
2603 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2604 (streamList->operation_mode ==
2605 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2606 ) {
2607 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2608 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002609 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002610 this,
2611 newStream,
2612 (cam_stream_type_t)
2613 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2614 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2615 mMetadataChannel,
2616 0); //heap buffers are not required for HFR video channel
2617 if (channel == NULL) {
2618 LOGE("allocation of channel failed");
2619 pthread_mutex_unlock(&mMutex);
2620 return -ENOMEM;
2621 }
2622 //channel->getNumBuffers() will return 0 here so use
2623 //MAX_INFLIGHT_HFR_REQUESTS
2624 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2625 newStream->priv = channel;
2626 LOGI("num video buffers in HFR mode: %d",
2627 MAX_INFLIGHT_HFR_REQUESTS);
2628 } else {
2629 /* Copy stream contents in HFR preview only case to create
2630 * dummy batch channel so that sensor streaming is in
2631 * HFR mode */
2632 if (!m_bIsVideo && (streamList->operation_mode ==
2633 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2634 mDummyBatchStream = *newStream;
2635 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002636 int bufferCount = MAX_INFLIGHT_REQUESTS;
2637 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2638 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002639 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2640 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2641 bufferCount = m_bIs4KVideo ?
2642 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2643 }
2644
Thierry Strudel2896d122017-02-23 19:18:03 -08002645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002646 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2647 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002648 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002649 this,
2650 newStream,
2651 (cam_stream_type_t)
2652 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2653 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2654 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002655 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 if (channel == NULL) {
2657 LOGE("allocation of channel failed");
2658 pthread_mutex_unlock(&mMutex);
2659 return -ENOMEM;
2660 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002661 /* disable UBWC for preview, though supported,
2662 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002663 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002664 (previewSize.width == (int32_t)videoWidth)&&
2665 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002666 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002668 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002669 /* When goog_zoom is linked to the preview or video stream,
2670 * disable ubwc to the linked stream */
2671 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2672 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2673 channel->setUBWCEnabled(false);
2674 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002675 newStream->max_buffers = channel->getNumBuffers();
2676 newStream->priv = channel;
2677 }
2678 break;
2679 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2680 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2681 mChannelHandle,
2682 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002683 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002684 this,
2685 newStream,
2686 (cam_stream_type_t)
2687 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2688 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2689 mMetadataChannel);
2690 if (channel == NULL) {
2691 LOGE("allocation of YUV channel failed");
2692 pthread_mutex_unlock(&mMutex);
2693 return -ENOMEM;
2694 }
2695 newStream->max_buffers = channel->getNumBuffers();
2696 newStream->priv = channel;
2697 break;
2698 }
2699 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2700 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002701 case HAL_PIXEL_FORMAT_RAW10: {
2702 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2703 (HAL_DATASPACE_DEPTH != newStream->data_space))
2704 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 mRawChannel = new QCamera3RawChannel(
2706 mCameraHandle->camera_handle, mChannelHandle,
2707 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002708 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002709 this, newStream,
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002711 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002712 if (mRawChannel == NULL) {
2713 LOGE("allocation of raw channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = mRawChannel->getNumBuffers();
2718 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2719 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002720 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002721 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002722 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2723 mDepthChannel = new QCamera3DepthChannel(
2724 mCameraHandle->camera_handle, mChannelHandle,
2725 mCameraHandle->ops, NULL, NULL, &padding_info,
2726 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2727 mMetadataChannel);
2728 if (NULL == mDepthChannel) {
2729 LOGE("Allocation of depth channel failed");
2730 pthread_mutex_unlock(&mMutex);
2731 return NO_MEMORY;
2732 }
2733 newStream->priv = mDepthChannel;
2734 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2735 } else {
2736 // Max live snapshot inflight buffer is 1. This is to mitigate
2737 // frame drop issues for video snapshot. The more buffers being
2738 // allocated, the more frame drops there are.
2739 mPictureChannel = new QCamera3PicChannel(
2740 mCameraHandle->camera_handle, mChannelHandle,
2741 mCameraHandle->ops, captureResultCb,
2742 setBufferErrorStatus, &padding_info, this, newStream,
2743 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2744 m_bIs4KVideo, isZsl, mMetadataChannel,
2745 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2746 if (mPictureChannel == NULL) {
2747 LOGE("allocation of channel failed");
2748 pthread_mutex_unlock(&mMutex);
2749 return -ENOMEM;
2750 }
2751 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2752 newStream->max_buffers = mPictureChannel->getNumBuffers();
2753 mPictureChannel->overrideYuvSize(
2754 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2755 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002756 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 break;
2758
2759 default:
2760 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002761 pthread_mutex_unlock(&mMutex);
2762 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002763 }
2764 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2765 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2766 } else {
2767 LOGE("Error, Unknown stream type");
2768 pthread_mutex_unlock(&mMutex);
2769 return -EINVAL;
2770 }
2771
2772 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002773 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002774 // Here we only care whether it's EIS3 or not
2775 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2776 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2777 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2778 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002779 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002780 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002781 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002782 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2783 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2784 }
2785 }
2786
2787 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2788 it != mStreamInfo.end(); it++) {
2789 if ((*it)->stream == newStream) {
2790 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2791 break;
2792 }
2793 }
2794 } else {
2795 // Channel already exists for this stream
2796 // Do nothing for now
2797 }
2798 padding_info = gCamCapability[mCameraId]->padding_info;
2799
Emilian Peev7650c122017-01-19 08:24:33 -08002800 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002801 * since there is no real stream associated with it
2802 */
Emilian Peev7650c122017-01-19 08:24:33 -08002803 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002804 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2805 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002806 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002807 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002808 }
2809
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002810 // Let buffer dispatcher know the configured streams.
2811 mOutputBufferDispatcher.configureStreams(streamList);
2812
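// The RAW-only bypass flag is only meaningful in the vendor RAW-only
// operation mode; clear it for every other mode so the analysis/support
// channels below are still created.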
Thierry Strudel2896d122017-02-23 19:18:03 -08002813 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2814 onlyRaw = false;
2815 }
2816
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002817 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002818 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002819 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002820 cam_analysis_info_t analysisInfo;
2821 int32_t ret = NO_ERROR;
2822 ret = mCommon.getAnalysisInfo(
2823 FALSE,
2824 analysisFeatureMask,
2825 &analysisInfo);
2826 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002827 cam_color_filter_arrangement_t analysis_color_arrangement =
2828 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2829 CAM_FILTER_ARRANGEMENT_Y :
2830 gCamCapability[mCameraId]->color_arrangement);
2831 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2832 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002833 cam_dimension_t analysisDim;
2834 analysisDim = mCommon.getMatchingDimension(previewSize,
2835 analysisInfo.analysis_recommended_res);
2836
2837 mAnalysisChannel = new QCamera3SupportChannel(
2838 mCameraHandle->camera_handle,
2839 mChannelHandle,
2840 mCameraHandle->ops,
2841 &analysisInfo.analysis_padding_info,
2842 analysisFeatureMask,
2843 CAM_STREAM_TYPE_ANALYSIS,
2844 &analysisDim,
2845 (analysisInfo.analysis_format
2846 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2847 : CAM_FORMAT_YUV_420_NV21),
2848 analysisInfo.hw_analysis_supported,
2849 gCamCapability[mCameraId]->color_arrangement,
2850 this,
2851 0); // force buffer count to 0
2852 } else {
2853 LOGW("getAnalysisInfo failed, ret = %d", ret);
2854 }
2855 if (!mAnalysisChannel) {
2856 LOGW("Analysis channel cannot be created");
2857 }
2858 }
2859
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 //RAW DUMP channel
2861 if (mEnableRawDump && isRawStreamRequested == false){
2862 cam_dimension_t rawDumpSize;
2863 rawDumpSize = getMaxRawSize(mCameraId);
2864 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2865 setPAAFSupport(rawDumpFeatureMask,
2866 CAM_STREAM_TYPE_RAW,
2867 gCamCapability[mCameraId]->color_arrangement);
2868 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2869 mChannelHandle,
2870 mCameraHandle->ops,
2871 rawDumpSize,
2872 &padding_info,
2873 this, rawDumpFeatureMask);
2874 if (!mRawDumpChannel) {
2875 LOGE("Raw Dump channel cannot be created");
2876 pthread_mutex_unlock(&mMutex);
2877 return -ENOMEM;
2878 }
2879 }
2880
Thierry Strudel3d639192016-09-09 11:52:26 -07002881 if (mAnalysisChannel) {
2882 cam_analysis_info_t analysisInfo;
2883 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2885 CAM_STREAM_TYPE_ANALYSIS;
2886 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2887 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002888 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002889 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2890 &analysisInfo);
2891 if (rc != NO_ERROR) {
2892 LOGE("getAnalysisInfo failed, ret = %d", rc);
2893 pthread_mutex_unlock(&mMutex);
2894 return rc;
2895 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002896 cam_color_filter_arrangement_t analysis_color_arrangement =
2897 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2898 CAM_FILTER_ARRANGEMENT_Y :
2899 gCamCapability[mCameraId]->color_arrangement);
2900 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2901 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2902 analysis_color_arrangement);
2903
Thierry Strudel3d639192016-09-09 11:52:26 -07002904 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002905 mCommon.getMatchingDimension(previewSize,
2906 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002907 mStreamConfigInfo.num_streams++;
2908 }
2909
Thierry Strudel2896d122017-02-23 19:18:03 -08002910 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 cam_analysis_info_t supportInfo;
2912 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2913 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2914 setPAAFSupport(callbackFeatureMask,
2915 CAM_STREAM_TYPE_CALLBACK,
2916 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002917 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002918 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002919 if (ret != NO_ERROR) {
2920 /* Ignore the error for Mono camera
2921 * because the PAAF bit mask is only set
2922 * for CAM_STREAM_TYPE_ANALYSIS stream type
2923 */
2924 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2925 LOGW("getAnalysisInfo failed, ret = %d", ret);
2926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002927 }
2928 mSupportChannel = new QCamera3SupportChannel(
2929 mCameraHandle->camera_handle,
2930 mChannelHandle,
2931 mCameraHandle->ops,
2932 &gCamCapability[mCameraId]->padding_info,
2933 callbackFeatureMask,
2934 CAM_STREAM_TYPE_CALLBACK,
2935 &QCamera3SupportChannel::kDim,
2936 CAM_FORMAT_YUV_420_NV21,
2937 supportInfo.hw_analysis_supported,
2938 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002939 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002940 if (!mSupportChannel) {
2941 LOGE("dummy channel cannot be created");
2942 pthread_mutex_unlock(&mMutex);
2943 return -ENOMEM;
2944 }
2945 }
2946
2947 if (mSupportChannel) {
2948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2949 QCamera3SupportChannel::kDim;
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2951 CAM_STREAM_TYPE_CALLBACK;
2952 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2953 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2954 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2955 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2956 gCamCapability[mCameraId]->color_arrangement);
2957 mStreamConfigInfo.num_streams++;
2958 }
2959
2960 if (mRawDumpChannel) {
2961 cam_dimension_t rawSize;
2962 rawSize = getMaxRawSize(mCameraId);
2963 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2964 rawSize;
2965 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2966 CAM_STREAM_TYPE_RAW;
2967 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2968 CAM_QCOM_FEATURE_NONE;
2969 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2970 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2971 gCamCapability[mCameraId]->color_arrangement);
2972 mStreamConfigInfo.num_streams++;
2973 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002974
2975 if (mHdrPlusRawSrcChannel) {
2976 cam_dimension_t rawSize;
2977 rawSize = getMaxRawSize(mCameraId);
2978 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2979 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2980 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2981 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2982 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2983 gCamCapability[mCameraId]->color_arrangement);
2984 mStreamConfigInfo.num_streams++;
2985 }
2986
Thierry Strudel3d639192016-09-09 11:52:26 -07002987 /* In HFR mode, if video stream is not added, create a dummy channel so that
2988 * the ISP can operate in batch mode even for the preview-only case. This channel is
2989 * never 'start'ed (no stream-on), it is only 'initialized' */
2990 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2991 !m_bIsVideo) {
2992 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2993 setPAAFSupport(dummyFeatureMask,
2994 CAM_STREAM_TYPE_VIDEO,
2995 gCamCapability[mCameraId]->color_arrangement);
2996 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2997 mChannelHandle,
2998 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002999 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003000 this,
3001 &mDummyBatchStream,
3002 CAM_STREAM_TYPE_VIDEO,
3003 dummyFeatureMask,
3004 mMetadataChannel);
3005 if (NULL == mDummyBatchChannel) {
3006 LOGE("creation of mDummyBatchChannel failed."
3007 "Preview will use non-hfr sensor mode ");
3008 }
3009 }
3010 if (mDummyBatchChannel) {
3011 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3012 mDummyBatchStream.width;
3013 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3014 mDummyBatchStream.height;
3015 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3016 CAM_STREAM_TYPE_VIDEO;
3017 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3018 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3019 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3020 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3021 gCamCapability[mCameraId]->color_arrangement);
3022 mStreamConfigInfo.num_streams++;
3023 }
3024
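// Advertise the in-flight buffer budget for this configuration: 0 for 4K
// video, MAX_VIDEO_BUFFERS when EIS3 is enabled for a video stream, and
// MAX_INFLIGHT_REQUESTS otherwise.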
3025 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3026 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003027 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003028 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003029
3030 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3031 for (pendingRequestIterator i = mPendingRequestsList.begin();
3032 i != mPendingRequestsList.end();) {
3033 i = erasePendingRequest(i);
3034 }
3035 mPendingFrameDropList.clear();
3036 // Initialize/Reset the pending buffers list
3037 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3038 req.mPendingBufferList.clear();
3039 }
3040 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003041 mExpectedInflightDuration = 0;
3042 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003043
Thierry Strudel3d639192016-09-09 11:52:26 -07003044 mCurJpegMeta.clear();
3045 //Get min frame duration for this stream configuration
3046 deriveMinFrameDuration();
3047
Chien-Yu Chenee335912017-02-09 17:53:20 -08003048 mFirstPreviewIntentSeen = false;
3049
3050 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003051 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003052 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3053 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003054 disableHdrPlusModeLocked();
3055 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003056
Thierry Strudel3d639192016-09-09 11:52:26 -07003057 // Update state
3058 mState = CONFIGURED;
3059
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003060 mFirstMetadataCallback = true;
3061
Thierry Strudel3d639192016-09-09 11:52:26 -07003062 pthread_mutex_unlock(&mMutex);
3063
3064 return rc;
3065}
3066
3067/*===========================================================================
3068 * FUNCTION : validateCaptureRequest
3069 *
3070 * DESCRIPTION: validate a capture request from camera service
3071 *
3072 * PARAMETERS :
3073 * @request : request from framework to process
3074 *
3075 * RETURN :
3076 *
3077 *==========================================================================*/
3078int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003079 camera3_capture_request_t *request,
3080 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003081{
3082 ssize_t idx = 0;
3083 const camera3_stream_buffer_t *b;
3084 CameraMetadata meta;
3085
3086 /* Sanity check the request */
3087 if (request == NULL) {
3088 LOGE("NULL capture request");
3089 return BAD_VALUE;
3090 }
3091
3092 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3093 /*settings cannot be null for the first request*/
3094 return BAD_VALUE;
3095 }
3096
3097 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003098 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3099 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003100 LOGE("%s: Request %d: No output buffers provided!",
3101 __FUNCTION__, frameNumber);
3102 return BAD_VALUE;
3103 }
3104 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3105 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3106 request->num_output_buffers, MAX_NUM_STREAMS);
3107 return BAD_VALUE;
3108 }
3109 if (request->input_buffer != NULL) {
3110 b = request->input_buffer;
3111 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3112 LOGE("Request %d: Buffer %ld: Status not OK!",
3113 frameNumber, (long)idx);
3114 return BAD_VALUE;
3115 }
3116 if (b->release_fence != -1) {
3117 LOGE("Request %d: Buffer %ld: Has a release fence!",
3118 frameNumber, (long)idx);
3119 return BAD_VALUE;
3120 }
3121 if (b->buffer == NULL) {
3122 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3123 frameNumber, (long)idx);
3124 return BAD_VALUE;
3125 }
3126 }
3127
3128 // Validate all buffers
3129 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003130 if (b == NULL) {
3131 return BAD_VALUE;
3132 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003133 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003134 QCamera3ProcessingChannel *channel =
3135 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3136 if (channel == NULL) {
3137 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3142 LOGE("Request %d: Buffer %ld: Status not OK!",
3143 frameNumber, (long)idx);
3144 return BAD_VALUE;
3145 }
3146 if (b->release_fence != -1) {
3147 LOGE("Request %d: Buffer %ld: Has a release fence!",
3148 frameNumber, (long)idx);
3149 return BAD_VALUE;
3150 }
3151 if (b->buffer == NULL) {
3152 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (*(b->buffer) == NULL) {
3157 LOGE("Request %d: Buffer %ld: NULL private handle!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 idx++;
3162 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003163 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003164 return NO_ERROR;
3165}
3166
3167/*===========================================================================
3168 * FUNCTION : deriveMinFrameDuration
3169 *
3170 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3171 * on currently configured streams.
3172 *
3173 * PARAMETERS : NONE
3174 *
3175 * RETURN : NONE
3176 *
3177 *==========================================================================*/
3178void QCamera3HardwareInterface::deriveMinFrameDuration()
3179{
3180 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003181 bool hasRaw = false;
3182
3183 mMinRawFrameDuration = 0;
3184 mMinJpegFrameDuration = 0;
3185 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003186
3187 maxJpegDim = 0;
3188 maxProcessedDim = 0;
3189 maxRawDim = 0;
3190
3191 // Figure out maximum jpeg, processed, and raw dimensions
3192 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3193 it != mStreamInfo.end(); it++) {
3194
3195 // Input stream doesn't have valid stream_type
3196 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3197 continue;
3198
3199 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3200 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3201 if (dimension > maxJpegDim)
3202 maxJpegDim = dimension;
3203 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3204 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3205 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003206 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003207 if (dimension > maxRawDim)
3208 maxRawDim = dimension;
3209 } else {
3210 if (dimension > maxProcessedDim)
3211 maxProcessedDim = dimension;
3212 }
3213 }
3214
3215 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3216 MAX_SIZES_CNT);
3217
3218 //Assume all jpeg dimensions are in processed dimensions.
3219 if (maxJpegDim > maxProcessedDim)
3220 maxProcessedDim = maxJpegDim;
3221 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003222 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003223 maxRawDim = INT32_MAX;
3224
3225 for (size_t i = 0; i < count; i++) {
3226 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3227 gCamCapability[mCameraId]->raw_dim[i].height;
3228 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3229 maxRawDim = dimension;
3230 }
3231 }
3232
3233 //Find minimum durations for processed, jpeg, and raw
3234 for (size_t i = 0; i < count; i++) {
3235 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3236 gCamCapability[mCameraId]->raw_dim[i].height) {
3237 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3238 break;
3239 }
3240 }
3241 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3242 for (size_t i = 0; i < count; i++) {
3243 if (maxProcessedDim ==
3244 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3245 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3246 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3247 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3248 break;
3249 }
3250 }
3251}
3252
3253/*===========================================================================
3254 * FUNCTION : getMinFrameDuration
3255 *
3256 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3257 * frame durations and the current request configuration.
3258 *
3259 * PARAMETERS : @request: request sent by the framework
3260 *
3261 * RETURN : min frame duration for a particular request
3262 *
3263 *==========================================================================*/
3264int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3265{
3266 bool hasJpegStream = false;
3267 bool hasRawStream = false;
3268 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3269 const camera3_stream_t *stream = request->output_buffers[i].stream;
3270 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3271 hasJpegStream = true;
3272 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3273 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3274 stream->format == HAL_PIXEL_FORMAT_RAW16)
3275 hasRawStream = true;
3276 }
3277
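// The per-request floor is the slowest of the minimum durations of the
// stream classes actually present in this request; the JPEG minimum only
// contributes when a BLOB buffer was requested.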
3278 if (!hasJpegStream)
3279 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3280 else
3281 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3282}
3283
3284/*===========================================================================
3285 * FUNCTION : handleBuffersDuringFlushLock
3286 *
3287 * DESCRIPTION: Account for buffers returned from back-end during flush
3288 * This function is executed while mMutex is held by the caller.
3289 *
3290 * PARAMETERS :
3291 * @buffer: image buffer for the callback
3292 *
3293 * RETURN :
3294 *==========================================================================*/
3295void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3296{
3297 bool buffer_found = false;
3298 for (List<PendingBuffersInRequest>::iterator req =
3299 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3300 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3301 for (List<PendingBufferInfo>::iterator i =
3302 req->mPendingBufferList.begin();
3303 i != req->mPendingBufferList.end(); i++) {
3304 if (i->buffer == buffer->buffer) {
3305 mPendingBuffersMap.numPendingBufsAtFlush--;
3306 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3307 buffer->buffer, req->frame_number,
3308 mPendingBuffersMap.numPendingBufsAtFlush);
3309 buffer_found = true;
3310 break;
3311 }
3312 }
3313 if (buffer_found) {
3314 break;
3315 }
3316 }
3317 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3318 //signal the flush()
3319 LOGD("All buffers returned to HAL. Continue flush");
3320 pthread_cond_signal(&mBuffersCond);
3321 }
3322}
3323
Thierry Strudel3d639192016-09-09 11:52:26 -07003324/*===========================================================================
3325 * FUNCTION : handleBatchMetadata
3326 *
3327 * DESCRIPTION: Handles metadata buffer callback in batch mode
3328 *
3329 * PARAMETERS : @metadata_buf: metadata buffer
3330 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3331 * the meta buf in this method
3332 *
3333 * RETURN :
3334 *
3335 *==========================================================================*/
3336void QCamera3HardwareInterface::handleBatchMetadata(
3337 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3338{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003339 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003340
3341 if (NULL == metadata_buf) {
3342 LOGE("metadata_buf is NULL");
3343 return;
3344 }
3345 /* In batch mode, the metadata will contain the frame number and timestamp of
3346 * the last frame in the batch. Eg: a batch containing buffers from request
3347 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3348 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3349 * multiple process_capture_results */
3350 metadata_buffer_t *metadata =
3351 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3352 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3353 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3354 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3355 uint32_t frame_number = 0, urgent_frame_number = 0;
3356 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3357 bool invalid_metadata = false;
3358 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3359 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003360 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003361
3362 int32_t *p_frame_number_valid =
3363 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3364 uint32_t *p_frame_number =
3365 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3366 int64_t *p_capture_time =
3367 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3368 int32_t *p_urgent_frame_number_valid =
3369 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3370 uint32_t *p_urgent_frame_number =
3371 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3372
3373 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3374 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3375 (NULL == p_urgent_frame_number)) {
3376 LOGE("Invalid metadata");
3377 invalid_metadata = true;
3378 } else {
3379 frame_number_valid = *p_frame_number_valid;
3380 last_frame_number = *p_frame_number;
3381 last_frame_capture_time = *p_capture_time;
3382 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3383 last_urgent_frame_number = *p_urgent_frame_number;
3384 }
3385
3386 /* In batch mode, when no video buffers are requested, set_parms are sent
3387 * for every capture_request. The difference between consecutive urgent
3388 * frame numbers and frame numbers should be used to interpolate the
3389 * corresponding frame numbers and time stamps */
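/* For example, if this batch reports last_frame_number = 8 and the batch
 * started at first_frame_number = 5, frameNumDiff is 4 and the loop below
 * replays the metadata for frames 5..8, spacing the inferred timestamps by
 * one frame period (NSEC_PER_SEC / mHFRVideoFps). Iterations beyond the
 * smaller of the two diffs clear the corresponding *_VALID flag so no stale
 * frame number is reported. */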
3390 pthread_mutex_lock(&mMutex);
3391 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003392 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3393 if(idx < 0) {
3394 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3395 last_urgent_frame_number);
3396 mState = ERROR;
3397 pthread_mutex_unlock(&mMutex);
3398 return;
3399 }
3400 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003401 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3402 first_urgent_frame_number;
3403
3404 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3405 urgent_frame_number_valid,
3406 first_urgent_frame_number, last_urgent_frame_number);
3407 }
3408
3409 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003410 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3411 if(idx < 0) {
3412 LOGE("Invalid frame number received: %d. Irrecoverable error",
3413 last_frame_number);
3414 mState = ERROR;
3415 pthread_mutex_unlock(&mMutex);
3416 return;
3417 }
3418 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003419 frameNumDiff = last_frame_number + 1 -
3420 first_frame_number;
3421 mPendingBatchMap.removeItem(last_frame_number);
3422
3423 LOGD("frm: valid: %d frm_num: %d - %d",
3424 frame_number_valid,
3425 first_frame_number, last_frame_number);
3426
3427 }
3428 pthread_mutex_unlock(&mMutex);
3429
3430 if (urgent_frame_number_valid || frame_number_valid) {
3431 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3432 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3433 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3434 urgentFrameNumDiff, last_urgent_frame_number);
3435 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3436 LOGE("frameNumDiff: %d frameNum: %d",
3437 frameNumDiff, last_frame_number);
3438 }
3439
3440 for (size_t i = 0; i < loopCount; i++) {
3441 /* handleMetadataWithLock is called even for invalid_metadata for
3442 * pipeline depth calculation */
3443 if (!invalid_metadata) {
3444 /* Infer frame number. Batch metadata contains frame number of the
3445 * last frame */
3446 if (urgent_frame_number_valid) {
3447 if (i < urgentFrameNumDiff) {
3448 urgent_frame_number =
3449 first_urgent_frame_number + i;
3450 LOGD("inferred urgent frame_number: %d",
3451 urgent_frame_number);
3452 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3453 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3454 } else {
3455 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3456 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3457 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3458 }
3459 }
3460
3461 /* Infer frame number. Batch metadata contains frame number of the
3462 * last frame */
3463 if (frame_number_valid) {
3464 if (i < frameNumDiff) {
3465 frame_number = first_frame_number + i;
3466 LOGD("inferred frame_number: %d", frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_FRAME_NUMBER, frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 if (last_frame_capture_time) {
3477 //Infer timestamp
3478 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003479 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003480 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003481 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3484 LOGD("batch capture_time: %lld, capture_time: %lld",
3485 last_frame_capture_time, capture_time);
3486 }
3487 }
3488 pthread_mutex_lock(&mMutex);
3489 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003490 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003491 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3492 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003493 &is_metabuf_queued /* whether metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003494 pthread_mutex_unlock(&mMutex);
3495 }
3496
3497 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003498 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003499 mMetadataChannel->bufDone(metadata_buf);
3500 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003501 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003502 }
3503}
3504
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3506 camera3_error_msg_code_t errorCode)
3507{
3508 camera3_notify_msg_t notify_msg;
3509 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3510 notify_msg.type = CAMERA3_MSG_ERROR;
3511 notify_msg.message.error.error_code = errorCode;
3512 notify_msg.message.error.error_stream = NULL;
3513 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003514 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003515
3516 return;
3517}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003518
3519/*===========================================================================
3520 * FUNCTION : sendPartialMetadataWithLock
3521 *
3522 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3523 *
3524 * PARAMETERS : @metadata: metadata buffer
3525 * @requestIter: The iterator for the pending capture request for
3526 * which the partial result is being sen
3527 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3528 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003529 * @isJumpstartMetadata: Whether this is a partial metadata for
3530 * jumpstart, i.e. even though it doesn't map to a valid partial
3531 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003532 *
3533 * RETURN :
3534 *
3535 *==========================================================================*/
3536
3537void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3538 metadata_buffer_t *metadata,
3539 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003540 bool lastUrgentMetadataInBatch,
3541 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003542{
3543 camera3_capture_result_t result;
3544 memset(&result, 0, sizeof(camera3_capture_result_t));
3545
3546 requestIter->partial_result_cnt++;
3547
3548 // Extract 3A metadata
3549 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003550 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3551 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003552 // Populate metadata result
3553 result.frame_number = requestIter->frame_number;
3554 result.num_output_buffers = 0;
3555 result.output_buffers = NULL;
3556 result.partial_result = requestIter->partial_result_cnt;
3557
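// If HDR+ is active, also forward this partial (3A) result to the HDR+
// client under gHdrPlusClientLock; the last argument tells the client
// whether all expected partial results for this frame have been delivered.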
3558 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003559 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003560 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3561 // Notify HDR+ client about the partial metadata.
3562 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3563 result.partial_result == PARTIAL_RESULT_COUNT);
3564 }
3565 }
3566
3567 orchestrateResult(&result);
3568 LOGD("urgent frame_number = %u", result.frame_number);
3569 free_camera_metadata((camera_metadata_t *)result.result);
3570}
3571
Thierry Strudel3d639192016-09-09 11:52:26 -07003572/*===========================================================================
3573 * FUNCTION : handleMetadataWithLock
3574 *
3575 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3576 *
3577 * PARAMETERS : @metadata_buf: metadata buffer
3578 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3579 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003580 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3581 * last urgent metadata in a batch. Always true for non-batch mode
3582 * @lastMetadataInBatch: Boolean to indicate whether this is the
3583 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003584 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3585 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 *
3587 * RETURN :
3588 *
3589 *==========================================================================*/
3590void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003591 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003592 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3593 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003594{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003595 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003596 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3597 //during flush do not send metadata from this thread
3598 LOGD("not sending metadata during flush or when mState is error");
3599 if (free_and_bufdone_meta_buf) {
3600 mMetadataChannel->bufDone(metadata_buf);
3601 free(metadata_buf);
3602 }
3603 return;
3604 }
3605
3606 //not in flush
3607 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3608 int32_t frame_number_valid, urgent_frame_number_valid;
3609 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003610 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 nsecs_t currentSysTime;
3612
3613 int32_t *p_frame_number_valid =
3614 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3615 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3616 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003617 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003618 int32_t *p_urgent_frame_number_valid =
3619 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3620 uint32_t *p_urgent_frame_number =
3621 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3622 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3623 metadata) {
3624 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3625 *p_frame_number_valid, *p_frame_number);
3626 }
3627
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 camera_metadata_t *resultMetadata = nullptr;
3629
Thierry Strudel3d639192016-09-09 11:52:26 -07003630 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3631 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3632 LOGE("Invalid metadata");
3633 if (free_and_bufdone_meta_buf) {
3634 mMetadataChannel->bufDone(metadata_buf);
3635 free(metadata_buf);
3636 }
3637 goto done_metadata;
3638 }
3639 frame_number_valid = *p_frame_number_valid;
3640 frame_number = *p_frame_number;
3641 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003642 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003643 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3644 urgent_frame_number = *p_urgent_frame_number;
3645 currentSysTime = systemTime(CLOCK_MONOTONIC);
3646
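// If the sensor timestamp is not already calibrated to the monotonic clock,
// estimate the BOOTTIME-to-MONOTONIC offset by bracketing a BOOTTIME read
// between two MONOTONIC reads (keeping the attempt with the smallest gap)
// and shift capture_time into the monotonic base. This assumes the reported
// sensor timestamp is in the BOOTTIME base.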
Jason Lee603176d2017-05-31 11:43:27 -07003647 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3648 const int tries = 3;
3649 nsecs_t bestGap, measured;
3650 for (int i = 0; i < tries; ++i) {
3651 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3652 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3653 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3654 const nsecs_t gap = tmono2 - tmono;
3655 if (i == 0 || gap < bestGap) {
3656 bestGap = gap;
3657 measured = tbase - ((tmono + tmono2) >> 1);
3658 }
3659 }
3660 capture_time -= measured;
3661 }
3662
Thierry Strudel3d639192016-09-09 11:52:26 -07003663 // Detect if buffers from any requests are overdue
3664 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003665 int64_t timeout;
3666 {
3667 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3668 // If there is a pending HDR+ request, the following requests may be blocked until the
3669 // HDR+ request is done. So allow a longer timeout.
3670 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3671 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003672 if (timeout < mExpectedInflightDuration) {
3673 timeout = mExpectedInflightDuration;
3674 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003675 }
3676
3677 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003679 assert(missed.stream->priv);
3680 if (missed.stream->priv) {
3681 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3682 assert(ch->mStreams[0]);
3683 if (ch->mStreams[0]) {
3684 LOGE("Cancel missing frame = %d, buffer = %p,"
3685 "stream type = %d, stream format = %d",
3686 req.frame_number, missed.buffer,
3687 ch->mStreams[0]->getMyType(), missed.stream->format);
3688 ch->timeoutFrame(req.frame_number);
3689 }
3690 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003691 }
3692 }
3693 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003694 //For the very first metadata callback, regardless of whether it contains a valid
3695 //frame number, send the partial metadata for the jumpstarting requests.
3696 //Note that this has to be done even if the metadata doesn't contain valid
3697 //urgent frame number, because in the case only 1 request is ever submitted
3698 //to HAL, there won't be subsequent valid urgent frame number.
3699 if (mFirstMetadataCallback) {
3700 for (pendingRequestIterator i =
3701 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3702 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003703 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3704 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003705 }
3706 }
3707 mFirstMetadataCallback = false;
3708 }
3709
Thierry Strudel3d639192016-09-09 11:52:26 -07003710 //Partial result on process_capture_result for timestamp
3711 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003712 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003713
3714 //Received an urgent Frame Number, handle it
3715 //using partial results
3716 for (pendingRequestIterator i =
3717 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3718 LOGD("Iterator Frame = %d urgent frame = %d",
3719 i->frame_number, urgent_frame_number);
3720
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003721 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003722 (i->partial_result_cnt == 0)) {
3723 LOGE("Error: HAL missed urgent metadata for frame number %d",
3724 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003725 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003726 }
3727
3728 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003729 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003730 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3731 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003732 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3733 // Instant AEC settled for this frame.
3734 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3735 mInstantAECSettledFrameNumber = urgent_frame_number;
3736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 break;
3738 }
3739 }
3740 }
3741
3742 if (!frame_number_valid) {
3743 LOGD("Not a valid normal frame number, used as SOF only");
3744 if (free_and_bufdone_meta_buf) {
3745 mMetadataChannel->bufDone(metadata_buf);
3746 free(metadata_buf);
3747 }
3748 goto done_metadata;
3749 }
3750 LOGH("valid frame_number = %u, capture_time = %lld",
3751 frame_number, capture_time);
3752
Emilian Peev4e0fe952017-06-30 12:40:09 -07003753 handleDepthDataLocked(metadata->depth_data, frame_number,
3754 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003755
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003756 // Check whether any stream buffer corresponding to this frame was dropped.
3757 // If dropped, send the ERROR_BUFFER message for the corresponding stream.
3758 // Also, if instant AEC is enabled, drop frames until AEC has settled.
3759 for (auto & pendingRequest : mPendingRequestsList) {
3760 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3761 mInstantAECSettledFrameNumber)) {
3762 camera3_notify_msg_t notify_msg = {};
3763 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003765 QCamera3ProcessingChannel *channel =
3766 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003767 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003768 if (p_cam_frame_drop) {
3769 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003770 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003771 // Got the stream ID for drop frame.
3772 dropFrame = true;
3773 break;
3774 }
3775 }
3776 } else {
3777 // This is instant AEC case.
3778 // For instant AEC drop the stream untill AEC is settled.
3779 dropFrame = true;
3780 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003782 if (dropFrame) {
3783 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3784 if (p_cam_frame_drop) {
3785 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003786 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003787 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003788 } else {
3789 // For instant AEC, inform frame drop and frame number
3790 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3791 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003792 pendingRequest.frame_number, streamID,
3793 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003794 }
3795 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003798 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003799 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003800 if (p_cam_frame_drop) {
3801 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003802 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 } else {
3805 // For instant AEC, inform frame drop and frame number
3806 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3807 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 pendingRequest.frame_number, streamID,
3809 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003810 }
3811 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003813 PendingFrameDrop.stream_ID = streamID;
3814 // Add the Frame drop info to mPendingFrameDropList
3815 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003816 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003817 }
3818 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003820
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003821 for (auto & pendingRequest : mPendingRequestsList) {
3822 // Find the pending request with the frame number.
3823 if (pendingRequest.frame_number == frame_number) {
3824 // Update the sensor timestamp.
3825 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003826
Thierry Strudel3d639192016-09-09 11:52:26 -07003827
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003828            /* Set the timestamp in the display metadata so that clients aware of
3829               private_handle, such as VT, can use these unmodified timestamps.
3830               The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003831 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003832
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 // Find channel requiring metadata, meaning internal offline postprocess
3834 // is needed.
3835 //TODO: for now, we don't support two streams requiring metadata at the same time.
3836            // (because we are not making copies, and the metadata buffer is not reference counted).
3837 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003838 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3839 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003840 if (iter->need_metadata) {
3841 internalPproc = true;
3842 QCamera3ProcessingChannel *channel =
3843 (QCamera3ProcessingChannel *)iter->stream->priv;
3844 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003845 if(p_is_metabuf_queued != NULL) {
3846 *p_is_metabuf_queued = true;
3847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 break;
3849 }
3850 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003851 for (auto itr = pendingRequest.internalRequestList.begin();
3852 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003853 if (itr->need_metadata) {
3854 internalPproc = true;
3855 QCamera3ProcessingChannel *channel =
3856 (QCamera3ProcessingChannel *)itr->stream->priv;
3857 channel->queueReprocMetadata(metadata_buf);
3858 break;
3859 }
3860 }
3861
Thierry Strudel54dc9782017-02-15 12:12:10 -08003862 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003863
3864 bool *enableZsl = nullptr;
3865 if (gExposeEnableZslKey) {
3866 enableZsl = &pendingRequest.enableZsl;
3867 }
3868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003870 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003871 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003872
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003874
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003875 if (pendingRequest.blob_request) {
3876 //Dump tuning metadata if enabled and available
3877 char prop[PROPERTY_VALUE_MAX];
3878 memset(prop, 0, sizeof(prop));
3879 property_get("persist.camera.dumpmetadata", prop, "0");
3880 int32_t enabled = atoi(prop);
3881 if (enabled && metadata->is_tuning_params_valid) {
3882 dumpMetadataToFile(metadata->tuning_params,
3883 mMetaFrameCount,
3884 enabled,
3885 "Snapshot",
3886 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887 }
3888 }
3889
3890 if (!internalPproc) {
3891 LOGD("couldn't find need_metadata for this metadata");
3892 // Return metadata buffer
3893 if (free_and_bufdone_meta_buf) {
3894 mMetadataChannel->bufDone(metadata_buf);
3895 free(metadata_buf);
3896 }
3897 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003898
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003899 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003900 }
3901 }
3902
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003903 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3904
3905 // Try to send out capture result metadata.
3906 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003907 return;
3908
Thierry Strudel3d639192016-09-09 11:52:26 -07003909done_metadata:
3910 for (pendingRequestIterator i = mPendingRequestsList.begin();
3911 i != mPendingRequestsList.end() ;i++) {
3912 i->pipeline_depth++;
3913 }
3914 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3915 unblockRequestIfNecessary();
3916}
3917
3918/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003919 * FUNCTION   : handleDepthDataLocked
3920 *
3921 * DESCRIPTION: Handles incoming depth data
3922 *
3923 * PARAMETERS : @depthData : Depth data
3924 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003925 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003926 *
3927 * RETURN :
3928 *
3929 *==========================================================================*/
3930void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003931 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003932 uint32_t currentFrameNumber;
3933 buffer_handle_t *depthBuffer;
3934
3935 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003936 return;
3937 }
3938
3939 camera3_stream_buffer_t resultBuffer =
3940 {.acquire_fence = -1,
3941 .release_fence = -1,
3942 .status = CAMERA3_BUFFER_STATUS_OK,
3943 .buffer = nullptr,
3944 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003945 do {
3946 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3947 if (nullptr == depthBuffer) {
3948 break;
3949 }
3950
Emilian Peev7650c122017-01-19 08:24:33 -08003951 resultBuffer.buffer = depthBuffer;
3952 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003953 if (valid) {
3954 int32_t rc = mDepthChannel->populateDepthData(depthData,
3955 frameNumber);
3956 if (NO_ERROR != rc) {
3957 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3958 } else {
3959 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3960 }
Emilian Peev7650c122017-01-19 08:24:33 -08003961 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003962 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003963 }
3964 } else if (currentFrameNumber > frameNumber) {
3965 break;
3966 } else {
3967 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3968 {{currentFrameNumber, mDepthChannel->getStream(),
3969 CAMERA3_MSG_ERROR_BUFFER}}};
3970 orchestrateNotify(&notify_msg);
3971
3972 LOGE("Depth buffer for frame number: %d is missing "
3973 "returning back!", currentFrameNumber);
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3975 }
3976 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003977 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003978 } while (currentFrameNumber < frameNumber);
3979}
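/* Illustrative walk-through of the loop above (hypothetical frame numbers, for
 * documentation only): if depth data arrives for frame 12 while the oldest
 * mapped buffers are 10, 11 and 12, buffers 10 and 11 are returned with
 * CAMERA3_BUFFER_STATUS_ERROR plus a CAMERA3_MSG_ERROR_BUFFER notify (their
 * depth data never arrived), and buffer 12 is filled via populateDepthData()
 * and returned with CAMERA3_BUFFER_STATUS_OK, or with an error status if the
 * valid flag is false or population fails. */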
3980
3981/*===========================================================================
3982 * FUNCTION : notifyErrorFoPendingDepthData
3983 *
3984 * DESCRIPTION: Returns error for any pending depth buffers
3985 *
3986 * PARAMETERS : depthCh - depth channel that needs to get flushed
3987 *
3988 * RETURN :
3989 *
3990 *==========================================================================*/
3991void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3992 QCamera3DepthChannel *depthCh) {
3993 uint32_t currentFrameNumber;
3994 buffer_handle_t *depthBuffer;
3995
3996 if (nullptr == depthCh) {
3997 return;
3998 }
3999
4000 camera3_notify_msg_t notify_msg =
4001 {.type = CAMERA3_MSG_ERROR,
4002 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4003 camera3_stream_buffer_t resultBuffer =
4004 {.acquire_fence = -1,
4005 .release_fence = -1,
4006 .buffer = nullptr,
4007 .stream = depthCh->getStream(),
4008 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004009
4010 while (nullptr !=
4011 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4012 depthCh->unmapBuffer(currentFrameNumber);
4013
4014 notify_msg.message.error.frame_number = currentFrameNumber;
4015 orchestrateNotify(&notify_msg);
4016
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004017 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004018 };
4019}
4020
4021/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004022 * FUNCTION : hdrPlusPerfLock
4023 *
4024 * DESCRIPTION: perf lock for HDR+ using custom intent
4025 *
4026 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4027 *
4028 * RETURN : None
4029 *
4030 *==========================================================================*/
4031void QCamera3HardwareInterface::hdrPlusPerfLock(
4032 mm_camera_super_buf_t *metadata_buf)
4033{
4034 if (NULL == metadata_buf) {
4035 LOGE("metadata_buf is NULL");
4036 return;
4037 }
4038 metadata_buffer_t *metadata =
4039 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4040 int32_t *p_frame_number_valid =
4041 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4042 uint32_t *p_frame_number =
4043 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4044
4045 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4046 LOGE("%s: Invalid metadata", __func__);
4047 return;
4048 }
4049
Wei Wang01385482017-08-03 10:49:34 -07004050    // Acquire the perf lock for 2 seconds after the last HDR frame is captured.
4051 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004052 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4053 if ((p_frame_number != NULL) &&
4054 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004055 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 }
4057 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004058}
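/* Note: the snapshot perf lock taken here is requested with a
 * HDR_PLUS_PERF_TIME_OUT (2000 ms) timeout; it is also released explicitly in
 * handleBufferWithLock() when a BLOB (JPEG) buffer is returned, so whichever
 * happens first ends the boost. */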
4059
4060/*===========================================================================
4061 * FUNCTION : handleInputBufferWithLock
4062 *
4063 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4064 *
4065 * PARAMETERS : @frame_number: frame number of the input buffer
4066 *
4067 * RETURN :
4068 *
4069 *==========================================================================*/
4070void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4071{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004072 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004073 pendingRequestIterator i = mPendingRequestsList.begin();
4074 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4075 i++;
4076 }
4077 if (i != mPendingRequestsList.end() && i->input_buffer) {
4078 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004079 CameraMetadata settings;
4080 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4081 if(i->settings) {
4082 settings = i->settings;
4083 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4084 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004085 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004086 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004088 } else {
4089 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004090 }
4091
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004092 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4093 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4094 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004095
4096 camera3_capture_result result;
4097 memset(&result, 0, sizeof(camera3_capture_result));
4098 result.frame_number = frame_number;
4099 result.result = i->settings;
4100 result.input_buffer = i->input_buffer;
4101 result.partial_result = PARTIAL_RESULT_COUNT;
4102
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004103 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 LOGD("Input request metadata and input buffer frame_number = %u",
4105 i->frame_number);
4106 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004107
4108 // Dispatch result metadata that may be just unblocked by this reprocess result.
4109 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110 } else {
4111 LOGE("Could not find input request for frame number %d", frame_number);
4112 }
4113}
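/* Sketch of the result produced for a reprocess (input buffer) request, with
 * assumed typical values rather than guaranteed ones:
 *
 *   shutter : frame_number = N, timestamp taken from ANDROID_SENSOR_TIMESTAMP
 *             in the input settings (falls back to the current monotonic time)
 *   result  : result metadata == the request settings, input_buffer echoed
 *             back, num_output_buffers = 0, partial_result = PARTIAL_RESULT_COUNT
 *
 * Output buffers for the same frame are delivered separately through
 * handleBufferWithLock() and mOutputBufferDispatcher. */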
4114
4115/*===========================================================================
4116 * FUNCTION : handleBufferWithLock
4117 *
4118 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4119 *
4120 * PARAMETERS : @buffer: image buffer for the callback
4121 * @frame_number: frame number of the image buffer
4122 *
4123 * RETURN :
4124 *
4125 *==========================================================================*/
4126void QCamera3HardwareInterface::handleBufferWithLock(
4127 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4128{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004129 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004130
4131 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4132 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4133 }
4134
Thierry Strudel3d639192016-09-09 11:52:26 -07004135 /* Nothing to be done during error state */
4136 if ((ERROR == mState) || (DEINIT == mState)) {
4137 return;
4138 }
4139 if (mFlushPerf) {
4140 handleBuffersDuringFlushLock(buffer);
4141 return;
4142 }
4143 //not in flush
4144 // If the frame number doesn't exist in the pending request list,
4145 // directly send the buffer to the frameworks, and update pending buffers map
4146 // Otherwise, book-keep the buffer.
4147 pendingRequestIterator i = mPendingRequestsList.begin();
4148 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4149 i++;
4150 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004151
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004152 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004153 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004154 // For a reprocessing request, try to send out result metadata.
4155 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004157 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004158
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004159 // Check if this frame was dropped.
4160 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4161 m != mPendingFrameDropList.end(); m++) {
4162 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4163 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4164 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4165 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4166 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4167 frame_number, streamID);
4168 m = mPendingFrameDropList.erase(m);
4169 break;
4170 }
4171 }
4172
4173 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4174 LOGH("result frame_number = %d, buffer = %p",
4175 frame_number, buffer->buffer);
4176
4177 mPendingBuffersMap.removeBuf(buffer->buffer);
4178 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4179
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004180 if (mPreviewStarted == false) {
4181 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4182 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004183 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4184
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004185 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4186 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4187 mPreviewStarted = true;
4188
4189 // Set power hint for preview
4190 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4191 }
4192 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004193}
4194
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004195void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004196 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004197{
4198 // Find the pending request for this result metadata.
4199 auto requestIter = mPendingRequestsList.begin();
4200 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4201 requestIter++;
4202 }
4203
4204 if (requestIter == mPendingRequestsList.end()) {
4205 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4206 return;
4207 }
4208
4209 // Update the result metadata
4210 requestIter->resultMetadata = resultMetadata;
4211
4212 // Check what type of request this is.
4213 bool liveRequest = false;
4214 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004215 // HDR+ request doesn't have partial results.
4216 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004217 } else if (requestIter->input_buffer != nullptr) {
4218 // Reprocessing request result is the same as settings.
4219 requestIter->resultMetadata = requestIter->settings;
4220 // Reprocessing request doesn't have partial results.
4221 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4222 } else {
4223 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004224 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004225 mPendingLiveRequest--;
4226
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004227 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004228 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004229 // For a live request, send the metadata to HDR+ client.
4230 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4231 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4232 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4233 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004234 }
4235 }
4236
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004237    // Remove the lens shading map if it was not requested.
4238 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4239 CameraMetadata metadata;
4240 metadata.acquire(resultMetadata);
4241 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4242 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4243 &requestIter->requestedLensShadingMapMode, 1);
4244
4245 requestIter->resultMetadata = metadata.release();
4246 }
4247
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004248 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4249}
4250
4251void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4252 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004253 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4254 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004255 bool readyToSend = true;
4256
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004257 // Iterate through the pending requests to send out result metadata that are ready. Also if
4258 // this result metadata belongs to a live request, notify errors for previous live requests
4259 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004260 auto iter = mPendingRequestsList.begin();
4261 while (iter != mPendingRequestsList.end()) {
4262 // Check if current pending request is ready. If it's not ready, the following pending
4263 // requests are also not ready.
4264 if (readyToSend && iter->resultMetadata == nullptr) {
4265 readyToSend = false;
4266 }
4267
4268 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4269
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004270 camera3_capture_result_t result = {};
4271 result.frame_number = iter->frame_number;
4272 result.result = iter->resultMetadata;
4273 result.partial_result = iter->partial_result_cnt;
4274
4275 // If this pending buffer has result metadata, we may be able to send out shutter callback
4276 // and result metadata.
4277 if (iter->resultMetadata != nullptr) {
4278 if (!readyToSend) {
4279 // If any of the previous pending request is not ready, this pending request is
4280 // also not ready to send in order to keep shutter callbacks and result metadata
4281 // in order.
4282 iter++;
4283 continue;
4284 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004285 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004286 // If the result metadata belongs to a live request, notify errors for previous pending
4287 // live requests.
4288 mPendingLiveRequest--;
4289
4290 CameraMetadata dummyMetadata;
4291 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4292 result.result = dummyMetadata.release();
4293
4294 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004295
4296            // partial_result should be PARTIAL_RESULT_COUNT in case of
4297            // ERROR_RESULT.
4298 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4299 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004300 } else {
4301 iter++;
4302 continue;
4303 }
4304
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004305 result.output_buffers = nullptr;
4306 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004307 orchestrateResult(&result);
4308
4309 // For reprocessing, result metadata is the same as settings so do not free it here to
4310 // avoid double free.
4311 if (result.result != iter->settings) {
4312 free_camera_metadata((camera_metadata_t *)result.result);
4313 }
4314 iter->resultMetadata = nullptr;
4315 iter = erasePendingRequest(iter);
4316 }
4317
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004318 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004319 for (auto &iter : mPendingRequestsList) {
4320 // Increment pipeline depth for the following pending requests.
4321 if (iter.frame_number > frameNumber) {
4322 iter.pipeline_depth++;
4323 }
4324 }
4325 }
4326
4327 unblockRequestIfNecessary();
4328}
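/* Ordering note: results are dispatched strictly in frame-number order. A
 * request whose metadata is ready is still held back while an earlier request
 * is not ready; conversely, when a newer live result arrives, older live
 * requests that never produced metadata are completed with a dummy result and
 * a CAMERA3_MSG_ERROR_RESULT notification so the framework's ordering
 * guarantees still hold. */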
4329
Thierry Strudel3d639192016-09-09 11:52:26 -07004330/*===========================================================================
4331 * FUNCTION : unblockRequestIfNecessary
4332 *
4333 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4334 * that mMutex is held when this function is called.
4335 *
4336 * PARAMETERS :
4337 *
4338 * RETURN :
4339 *
4340 *==========================================================================*/
4341void QCamera3HardwareInterface::unblockRequestIfNecessary()
4342{
4343 // Unblock process_capture_request
4344 pthread_cond_signal(&mRequestCond);
4345}
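/* The matching wait is presumed to be a pthread_cond_(timed)wait on
 * mRequestCond under mMutex in processCaptureRequest(), which blocks once the
 * number of in-flight requests (mPendingLiveRequest) reaches its limit;
 * signalling here lets the next capture request proceed after results have
 * been dispatched. */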
4346
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004347/*===========================================================================
4348 * FUNCTION : isHdrSnapshotRequest
4349 *
4350 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4351 *
4352 * PARAMETERS : camera3 request structure
4353 *
4354 * RETURN : boolean decision variable
4355 *
4356 *==========================================================================*/
4357bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4358{
4359 if (request == NULL) {
4360 LOGE("Invalid request handle");
4361 assert(0);
4362 return false;
4363 }
4364
4365 if (!mForceHdrSnapshot) {
4366 CameraMetadata frame_settings;
4367 frame_settings = request->settings;
4368
4369 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4370 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4371 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4372 return false;
4373 }
4374 } else {
4375 return false;
4376 }
4377
4378 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4379 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4380 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4381 return false;
4382 }
4383 } else {
4384 return false;
4385 }
4386 }
4387
4388 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4389 if (request->output_buffers[i].stream->format
4390 == HAL_PIXEL_FORMAT_BLOB) {
4391 return true;
4392 }
4393 }
4394
4395 return false;
4396}
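/* A request qualifies as an HDR snapshot (unless mForceHdrSnapshot is set)
 * only when its settings look roughly like the hypothetical sketch below and
 * at least one output buffer targets a HAL_PIXEL_FORMAT_BLOB (JPEG) stream:
 *
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // request->settings = settings.release(); plus >= 1 BLOB output buffer
 */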
4397/*===========================================================================
4398 * FUNCTION : orchestrateRequest
4399 *
4400 * DESCRIPTION: Orchestrates a capture request from camera service
4401 *
4402 * PARAMETERS :
4403 * @request : request from framework to process
4404 *
4405 * RETURN : Error status codes
4406 *
4407 *==========================================================================*/
4408int32_t QCamera3HardwareInterface::orchestrateRequest(
4409 camera3_capture_request_t *request)
4410{
4411
4412 uint32_t originalFrameNumber = request->frame_number;
4413 uint32_t originalOutputCount = request->num_output_buffers;
4414 const camera_metadata_t *original_settings = request->settings;
4415 List<InternalRequest> internallyRequestedStreams;
4416 List<InternalRequest> emptyInternalList;
4417
4418 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4419 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4420 uint32_t internalFrameNumber;
4421 CameraMetadata modified_meta;
4422
4423
4424 /* Add Blob channel to list of internally requested streams */
4425 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4426 if (request->output_buffers[i].stream->format
4427 == HAL_PIXEL_FORMAT_BLOB) {
4428 InternalRequest streamRequested;
4429 streamRequested.meteringOnly = 1;
4430 streamRequested.need_metadata = 0;
4431 streamRequested.stream = request->output_buffers[i].stream;
4432 internallyRequestedStreams.push_back(streamRequested);
4433 }
4434 }
4435 request->num_output_buffers = 0;
4436 auto itr = internallyRequestedStreams.begin();
4437
4438 /* Modify setting to set compensation */
4439 modified_meta = request->settings;
4440 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4441 uint8_t aeLock = 1;
4442 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4443 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4444 camera_metadata_t *modified_settings = modified_meta.release();
4445 request->settings = modified_settings;
4446
4447 /* Capture Settling & -2x frame */
4448 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4449 request->frame_number = internalFrameNumber;
4450 processCaptureRequest(request, internallyRequestedStreams);
4451
4452 request->num_output_buffers = originalOutputCount;
4453 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4454 request->frame_number = internalFrameNumber;
4455 processCaptureRequest(request, emptyInternalList);
4456 request->num_output_buffers = 0;
4457
4458 modified_meta = modified_settings;
4459 expCompensation = 0;
4460 aeLock = 1;
4461 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4462 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4463 modified_settings = modified_meta.release();
4464 request->settings = modified_settings;
4465
4466 /* Capture Settling & 0X frame */
4467
4468 itr = internallyRequestedStreams.begin();
4469 if (itr == internallyRequestedStreams.end()) {
4470 LOGE("Error Internally Requested Stream list is empty");
4471 assert(0);
4472 } else {
4473 itr->need_metadata = 0;
4474 itr->meteringOnly = 1;
4475 }
4476
4477 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4478 request->frame_number = internalFrameNumber;
4479 processCaptureRequest(request, internallyRequestedStreams);
4480
4481 itr = internallyRequestedStreams.begin();
4482 if (itr == internallyRequestedStreams.end()) {
4483 ALOGE("Error Internally Requested Stream list is empty");
4484 assert(0);
4485 } else {
4486 itr->need_metadata = 1;
4487 itr->meteringOnly = 0;
4488 }
4489
4490 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, internallyRequestedStreams);
4493
4494 /* Capture 2X frame*/
4495 modified_meta = modified_settings;
4496 expCompensation = GB_HDR_2X_STEP_EV;
4497 aeLock = 1;
4498 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4499 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4500 modified_settings = modified_meta.release();
4501 request->settings = modified_settings;
4502
4503 itr = internallyRequestedStreams.begin();
4504 if (itr == internallyRequestedStreams.end()) {
4505 ALOGE("Error Internally Requested Stream list is empty");
4506 assert(0);
4507 } else {
4508 itr->need_metadata = 0;
4509 itr->meteringOnly = 1;
4510 }
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528
4529 /* Capture 2X on original streaming config*/
4530 internallyRequestedStreams.clear();
4531
4532 /* Restore original settings pointer */
4533 request->settings = original_settings;
4534 } else {
4535 uint32_t internalFrameNumber;
4536 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4537 request->frame_number = internalFrameNumber;
4538 return processCaptureRequest(request, internallyRequestedStreams);
4539 }
4540
4541 return NO_ERROR;
4542}
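/* Summary of the HDR bracketing fan-out above (internal frame numbers come
 * from _orchestrationDb; the exposure labels follow the code comments):
 *   1. settling, metering-only request on the BLOB stream with AE locked and
 *      exposure compensation = GB_HDR_HALF_STEP_EV ("-2x" step)
 *   2. the original framework buffers captured at that compensation, with the
 *      internal frame number mapped back to the framework frame number
 *   3. settling (metering-only) then a metadata-producing capture at 0 EV
 *   4. settling (metering-only) then a metadata-producing capture at
 *      GB_HDR_2X_STEP_EV ("2x" step)
 * Non-HDR requests simply receive one internal frame number and go straight
 * to processCaptureRequest(). */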
4543
4544/*===========================================================================
4545 * FUNCTION : orchestrateResult
4546 *
4547 * DESCRIPTION: Orchestrates a capture result to camera service
4548 *
4549 * PARAMETERS :
4550 *   @result : capture result from the HAL to forward to the camera service
4551 *
4552 * RETURN :
4553 *
4554 *==========================================================================*/
4555void QCamera3HardwareInterface::orchestrateResult(
4556 camera3_capture_result_t *result)
4557{
4558 uint32_t frameworkFrameNumber;
4559 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4560 frameworkFrameNumber);
4561 if (rc != NO_ERROR) {
4562 LOGE("Cannot find translated frameworkFrameNumber");
4563 assert(0);
4564 } else {
4565 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004566 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004567 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004568 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004569 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4570 camera_metadata_entry_t entry;
4571 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4572 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004573 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004574 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4575 if (ret != OK)
4576 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004577 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004578 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004579 result->frame_number = frameworkFrameNumber;
4580 mCallbackOps->process_capture_result(mCallbackOps, result);
4581 }
4582 }
4583}
4584
4585/*===========================================================================
4586 * FUNCTION : orchestrateNotify
4587 *
4588 * DESCRIPTION: Orchestrates a notify to camera service
4589 *
4590 * PARAMETERS :
4591 *   @notify_msg : notify message (e.g. shutter or error) to forward to the camera service
4592 *
4593 * RETURN :
4594 *
4595 *==========================================================================*/
4596void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4597{
4598 uint32_t frameworkFrameNumber;
4599 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004600 int32_t rc = NO_ERROR;
4601
4602 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004603 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004604
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004605 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004606 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4607 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4608 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004609 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004610 LOGE("Cannot find translated frameworkFrameNumber");
4611 assert(0);
4612 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 }
4614 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004615
4616 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4617 LOGD("Internal Request drop the notifyCb");
4618 } else {
4619 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4620 mCallbackOps->notify(mCallbackOps, notify_msg);
4621 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004622}
4623
4624/*===========================================================================
4625 * FUNCTION : FrameNumberRegistry
4626 *
4627 * DESCRIPTION: Constructor
4628 *
4629 * PARAMETERS :
4630 *
4631 * RETURN :
4632 *
4633 *==========================================================================*/
4634FrameNumberRegistry::FrameNumberRegistry()
4635{
4636 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4637}
4638
4639/*===========================================================================
4640 * FUNCTION : ~FrameNumberRegistry
4641 *
4642 * DESCRIPTION: Destructor
4643 *
4644 * PARAMETERS :
4645 *
4646 * RETURN :
4647 *
4648 *==========================================================================*/
4649FrameNumberRegistry::~FrameNumberRegistry()
4650{
4651}
4652
4653/*===========================================================================
4654 * FUNCTION : PurgeOldEntriesLocked
4655 *
4656 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4657 *
4658 * PARAMETERS :
4659 *
4660 * RETURN : NONE
4661 *
4662 *==========================================================================*/
4663void FrameNumberRegistry::purgeOldEntriesLocked()
4664{
4665 while (_register.begin() != _register.end()) {
4666 auto itr = _register.begin();
4667 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4668 _register.erase(itr);
4669 } else {
4670 return;
4671 }
4672 }
4673}
4674
4675/*===========================================================================
4676 * FUNCTION : allocStoreInternalFrameNumber
4677 *
4678 * DESCRIPTION: Method to note down a framework request and associate a new
4679 * internal request number against it
4680 *
4681 * PARAMETERS :
4682 *   @frameworkFrameNumber: Identifier given by the framework
4683 *   @internalFrameNumber : Output parameter that receives the newly generated
4684 *                          internal frame number
4685 *
4686 * RETURN : Error code
4687 *
4688 *==========================================================================*/
4689int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4690 uint32_t &internalFrameNumber)
4691{
4692 Mutex::Autolock lock(mRegistryLock);
4693 internalFrameNumber = _nextFreeInternalNumber++;
4694 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4695 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4696 purgeOldEntriesLocked();
4697 return NO_ERROR;
4698}
4699
4700/*===========================================================================
4701 * FUNCTION : generateStoreInternalFrameNumber
4702 *
4703 * DESCRIPTION: Method to generate a new internal frame number that is not
4704 *              associated with any framework request
4705 *
4706 * PARAMETERS :
4707 *   @internalFrameNumber: Output parameter that receives the newly generated internal frame number
4708 *
4709 *
4710 * RETURN : Error code
4711 *
4712 *==========================================================================*/
4713int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4714{
4715 Mutex::Autolock lock(mRegistryLock);
4716 internalFrameNumber = _nextFreeInternalNumber++;
4717 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4718 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4719 purgeOldEntriesLocked();
4720 return NO_ERROR;
4721}
4722
4723/*===========================================================================
4724 * FUNCTION : getFrameworkFrameNumber
4725 *
4726 * DESCRIPTION: Method to query the framework frame number given an internal one
4727 *
4728 * PARAMETERS :
4729 *   @internalFrameNumber : Internal reference
4730 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4731 *
4732 * RETURN : Error code
4733 *
4734 *==========================================================================*/
4735int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4736 uint32_t &frameworkFrameNumber)
4737{
4738 Mutex::Autolock lock(mRegistryLock);
4739 auto itr = _register.find(internalFrameNumber);
4740 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004741 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004742 return -ENOENT;
4743 }
4744
4745 frameworkFrameNumber = itr->second;
4746 purgeOldEntriesLocked();
4747 return NO_ERROR;
4748}
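/* Minimal usage sketch of the registry (hypothetical frame numbers, for
 * illustration only):
 *
 *   uint32_t internalFN;
 *   _orchestrationDb.allocStoreInternalFrameNumber(42, internalFN);  // framework #42
 *   _orchestrationDb.generateStoreInternalFrameNumber(internalFN);   // HAL-only request
 *
 *   uint32_t fwkFN;
 *   if (_orchestrationDb.getFrameworkFrameNumber(internalFN, fwkFN) == NO_ERROR &&
 *           fwkFN != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // safe to forward this result/notify to the framework as frame fwkFN
 *   }
 *
 * Entries older than FRAME_REGISTER_LRU_SIZE internal numbers are purged on
 * every store/lookup by purgeOldEntriesLocked(). */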
Thierry Strudel3d639192016-09-09 11:52:26 -07004749
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004750status_t QCamera3HardwareInterface::fillPbStreamConfig(
4751 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4752 QCamera3Channel *channel, uint32_t streamIndex) {
4753 if (config == nullptr) {
4754 LOGE("%s: config is null", __FUNCTION__);
4755 return BAD_VALUE;
4756 }
4757
4758 if (channel == nullptr) {
4759 LOGE("%s: channel is null", __FUNCTION__);
4760 return BAD_VALUE;
4761 }
4762
4763 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4764 if (stream == nullptr) {
4765 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4766 return NAME_NOT_FOUND;
4767 }
4768
4769 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4770 if (streamInfo == nullptr) {
4771 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4772 return NAME_NOT_FOUND;
4773 }
4774
4775 config->id = pbStreamId;
4776 config->image.width = streamInfo->dim.width;
4777 config->image.height = streamInfo->dim.height;
4778 config->image.padding = 0;
4779 config->image.format = pbStreamFormat;
4780
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004781 uint32_t totalPlaneSize = 0;
4782
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004783 // Fill plane information.
4784 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4785 pbcamera::PlaneConfiguration plane;
4786 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4787 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4788 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004789
4790 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004791 }
4792
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004793 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004794 return OK;
4795}
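/* The padding reported to the HDR+ (pbcamera) stream configuration is the
 * slack between the backend's total frame length and the sum of the plane
 * sizes, i.e. padding = frame_len - sum(stride_i * scanline_i), presumably so
 * the client can account for allocator alignment at the end of the buffer. */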
4796
Thierry Strudel3d639192016-09-09 11:52:26 -07004797/*===========================================================================
4798 * FUNCTION : processCaptureRequest
4799 *
4800 * DESCRIPTION: process a capture request from camera service
4801 *
4802 * PARAMETERS :
4803 * @request : request from framework to process
4804 *
4805 * RETURN :
4806 *
4807 *==========================================================================*/
4808int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004809 camera3_capture_request_t *request,
4810 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004811{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004812 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004813 int rc = NO_ERROR;
4814 int32_t request_id;
4815 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004816 bool isVidBufRequested = false;
4817 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004818 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004819
4820 pthread_mutex_lock(&mMutex);
4821
4822 // Validate current state
4823 switch (mState) {
4824 case CONFIGURED:
4825 case STARTED:
4826 /* valid state */
4827 break;
4828
4829 case ERROR:
4830 pthread_mutex_unlock(&mMutex);
4831 handleCameraDeviceError();
4832 return -ENODEV;
4833
4834 default:
4835 LOGE("Invalid state %d", mState);
4836 pthread_mutex_unlock(&mMutex);
4837 return -ENODEV;
4838 }
4839
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004840 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 if (rc != NO_ERROR) {
4842 LOGE("incoming request is not valid");
4843 pthread_mutex_unlock(&mMutex);
4844 return rc;
4845 }
4846
4847 meta = request->settings;
4848
4849 // For first capture request, send capture intent, and
4850 // stream on all streams
4851 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004852 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004853 // send an unconfigure to the backend so that the isp
4854 // resources are deallocated
4855 if (!mFirstConfiguration) {
4856 cam_stream_size_info_t stream_config_info;
4857 int32_t hal_version = CAM_HAL_V3;
4858 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4859 stream_config_info.buffer_info.min_buffers =
4860 MIN_INFLIGHT_REQUESTS;
4861 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004862 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004863 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004864 clear_metadata_buffer(mParameters);
4865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4866 CAM_INTF_PARM_HAL_VERSION, hal_version);
4867 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4868 CAM_INTF_META_STREAM_INFO, stream_config_info);
4869 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4870 mParameters);
4871 if (rc < 0) {
4872 LOGE("set_parms for unconfigure failed");
4873 pthread_mutex_unlock(&mMutex);
4874 return rc;
4875 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004876
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004878 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 property_get("persist.camera.is_type", is_type_value, "4");
4883 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4884        // Default the preview IS_TYPE to IS_TYPE_EIS_2_0
4885 property_get("persist.camera.is_type_preview", is_type_value, "4");
4886 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4887 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004888
4889 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4890 int32_t hal_version = CAM_HAL_V3;
4891 uint8_t captureIntent =
4892 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4893 mCaptureIntent = captureIntent;
4894 clear_metadata_buffer(mParameters);
4895 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4896 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4897 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004898 if (mFirstConfiguration) {
4899 // configure instant AEC
4900 // Instant AEC is a session based parameter and it is needed only
4901 // once per complete session after open camera.
4902 // i.e. This is set only once for the first capture request, after open camera.
4903 setInstantAEC(meta);
4904 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004905 uint8_t fwkVideoStabMode=0;
4906 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4907 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4908 }
4909
Xue Tuecac74e2017-04-17 13:58:15 -07004910        // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4911 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004912 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 int32_t vsMode;
4914 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4916 rc = BAD_VALUE;
4917 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004918 LOGD("setEis %d", setEis);
4919 bool eis3Supported = false;
4920 size_t count = IS_TYPE_MAX;
4921 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4922 for (size_t i = 0; i < count; i++) {
4923 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4924 eis3Supported = true;
4925 break;
4926 }
4927 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004928
4929 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4932 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004933 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4934 is_type = isTypePreview;
4935 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4936 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4937 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004938 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004939 } else {
4940 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942 } else {
4943 is_type = IS_TYPE_NONE;
4944 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004945 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4948 }
4949 }
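        /* Net effect of the per-stream IS type selection above, assuming
         * setEis is true:
         *   PREVIEW -> isTypePreview (persist.camera.is_type_preview)
         *   VIDEO   -> isTypeVideo, downgraded from IS_TYPE_EIS_3_0 to
         *              IS_TYPE_EIS_2_0 when EIS 3.0 is not in the sensor's
         *              supported_is_types list
         *   others  -> IS_TYPE_NONE
         * With setEis false, every stream gets IS_TYPE_NONE. */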
4950
4951 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4952 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4953
Thierry Strudel54dc9782017-02-15 12:12:10 -08004954 //Disable tintless only if the property is set to 0
4955 memset(prop, 0, sizeof(prop));
4956 property_get("persist.camera.tintless.enable", prop, "1");
4957 int32_t tintless_value = atoi(prop);
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4960 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004961
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 //Disable CDS for HFR mode or if DIS/EIS is on.
4963        //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4964        //after every configure_streams call
4965 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4966 (m_bIsVideo)) {
4967 int32_t cds = CAM_CDS_MODE_OFF;
4968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4969 CAM_INTF_PARM_CDS_MODE, cds))
4970 LOGE("Failed to disable CDS for HFR mode");
4971
4972 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004973
4974 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4975 uint8_t* use_av_timer = NULL;
4976
4977 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004978 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004979 use_av_timer = &m_debug_avtimer;
4980 }
4981 else{
4982 use_av_timer =
4983 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004984 if (use_av_timer) {
4985 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4986 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004987 }
4988
4989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4990 rc = BAD_VALUE;
4991 }
4992 }
4993
Thierry Strudel3d639192016-09-09 11:52:26 -07004994 setMobicat();
4995
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004996 uint8_t nrMode = 0;
4997 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4998 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4999 }
5000
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 /* Set fps and hfr mode while sending meta stream info so that sensor
5002 * can configure appropriate streaming mode */
5003 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005004 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5005 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5007 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008 if (rc == NO_ERROR) {
5009 int32_t max_fps =
5010 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005011 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005012 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5013 }
5014 /* For HFR, more buffers are dequeued upfront to improve the performance */
5015 if (mBatchSize) {
5016 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5017 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5018 }
5019 }
5020 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005021 LOGE("setHalFpsRange failed");
5022 }
5023 }
5024 if (meta.exists(ANDROID_CONTROL_MODE)) {
5025 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5026 rc = extractSceneMode(meta, metaMode, mParameters);
5027 if (rc != NO_ERROR) {
5028 LOGE("extractSceneMode failed");
5029 }
5030 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005031 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005032
Thierry Strudel04e026f2016-10-10 11:27:36 -07005033 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5034 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5035 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5036 rc = setVideoHdrMode(mParameters, vhdr);
5037 if (rc != NO_ERROR) {
5038 LOGE("setVideoHDR is failed");
5039 }
5040 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005041
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005042 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005043 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005044 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005045 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5047 sensorModeFullFov)) {
5048 rc = BAD_VALUE;
5049 }
5050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005051 //TODO: validate the arguments, HSV scenemode should have only the
5052 //advertised fps ranges
5053
5054        /* Set the capture intent, HAL version, tintless, stream info,
5055         * and DIS enable parameters in the backend */
5056 LOGD("set_parms META_STREAM_INFO " );
5057 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005058 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5059 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 mStreamConfigInfo.type[i],
5061 mStreamConfigInfo.stream_sizes[i].width,
5062 mStreamConfigInfo.stream_sizes[i].height,
5063 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mStreamConfigInfo.format[i],
5065 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005067
Thierry Strudel3d639192016-09-09 11:52:26 -07005068 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5069 mParameters);
5070 if (rc < 0) {
5071 LOGE("set_parms failed for hal version, stream info");
5072 }
5073
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005074 cam_sensor_mode_info_t sensorModeInfo = {};
5075 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 if (rc != NO_ERROR) {
5077 LOGE("Failed to get sensor output size");
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081
5082 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5083 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005084 sensorModeInfo.active_array_size.width,
5085 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005086
5087 /* Set batchmode before initializing channel. Since registerBuffer
5088 * internally initializes some of the channels, better set batchmode
5089 * even before first register buffer */
5090 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5091 it != mStreamInfo.end(); it++) {
5092 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5093 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5094 && mBatchSize) {
5095 rc = channel->setBatchSize(mBatchSize);
5096 //Disable per frame map unmap for HFR/batchmode case
5097 rc |= channel->setPerFrameMapUnmap(false);
5098 if (NO_ERROR != rc) {
5099 LOGE("Channel init failed %d", rc);
5100 pthread_mutex_unlock(&mMutex);
5101 goto error_exit;
5102 }
5103 }
5104 }
5105
5106 //First initialize all streams
5107 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5108 it != mStreamInfo.end(); it++) {
5109 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005110
5111 /* Initial value of NR mode is needed before stream on */
5112 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5114 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005115 setEis) {
5116 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5117 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5118 is_type = mStreamConfigInfo.is_type[i];
5119 break;
5120 }
5121 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005123 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005124 rc = channel->initialize(IS_TYPE_NONE);
5125 }
5126 if (NO_ERROR != rc) {
5127 LOGE("Channel initialization failed %d", rc);
5128 pthread_mutex_unlock(&mMutex);
5129 goto error_exit;
5130 }
5131 }
5132
5133 if (mRawDumpChannel) {
5134 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5135 if (rc != NO_ERROR) {
5136 LOGE("Error: Raw Dump Channel init failed");
5137 pthread_mutex_unlock(&mMutex);
5138 goto error_exit;
5139 }
5140 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005141 if (mHdrPlusRawSrcChannel) {
5142 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5143 if (rc != NO_ERROR) {
5144 LOGE("Error: HDR+ RAW Source Channel init failed");
5145 pthread_mutex_unlock(&mMutex);
5146 goto error_exit;
5147 }
5148 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005149 if (mSupportChannel) {
5150 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5151 if (rc < 0) {
5152 LOGE("Support channel initialization failed");
5153 pthread_mutex_unlock(&mMutex);
5154 goto error_exit;
5155 }
5156 }
5157 if (mAnalysisChannel) {
5158 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5159 if (rc < 0) {
5160 LOGE("Analysis channel initialization failed");
5161 pthread_mutex_unlock(&mMutex);
5162 goto error_exit;
5163 }
5164 }
5165 if (mDummyBatchChannel) {
5166 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5167 if (rc < 0) {
5168 LOGE("mDummyBatchChannel setBatchSize failed");
5169 pthread_mutex_unlock(&mMutex);
5170 goto error_exit;
5171 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005172 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 if (rc < 0) {
5174 LOGE("mDummyBatchChannel initialization failed");
5175 pthread_mutex_unlock(&mMutex);
5176 goto error_exit;
5177 }
5178 }
5179
5180 // Set bundle info
5181 rc = setBundleInfo();
5182 if (rc < 0) {
5183 LOGE("setBundleInfo failed %d", rc);
5184 pthread_mutex_unlock(&mMutex);
5185 goto error_exit;
5186 }
5187
5188 //update settings from app here
5189 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5190 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5191 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5192 }
5193 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5194 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5195 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5196 }
5197 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5198 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5199 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5200
5201 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5202 (mLinkedCameraId != mCameraId) ) {
5203 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5204 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005205 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005206 goto error_exit;
5207 }
5208 }
5209
5210 // add bundle related cameras
5211 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5212 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005213 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5214 &m_pDualCamCmdPtr->bundle_info;
5215 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005216 if (mIsDeviceLinked)
5217 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5218 else
5219 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5220
5221 pthread_mutex_lock(&gCamLock);
5222
5223 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5224 LOGE("Dualcam: Invalid Session Id ");
5225 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005226 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 goto error_exit;
5228 }
5229
5230 if (mIsMainCamera == 1) {
5231 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5232 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005233 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005234 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005235 // related session id should be session id of linked session
5236 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5237 } else {
5238 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5239 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005240 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005241 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5243 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005244 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 pthread_mutex_unlock(&gCamLock);
5246
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005247 rc = mCameraHandle->ops->set_dual_cam_cmd(
5248 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005249 if (rc < 0) {
5250 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005251 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005252 goto error_exit;
5253 }
5254 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 goto no_error;
5256error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005257 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 return rc;
5259no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 mWokenUpByDaemon = false;
5261 mPendingLiveRequest = 0;
5262 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263 }
5264
5265 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005266 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005267
5268 if (mFlushPerf) {
5269 //we cannot accept any requests during flush
5270 LOGE("process_capture_request cannot proceed during flush");
5271 pthread_mutex_unlock(&mMutex);
5272 return NO_ERROR; //should return an error
5273 }
5274
5275 if (meta.exists(ANDROID_REQUEST_ID)) {
5276 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5277 mCurrentRequestId = request_id;
5278 LOGD("Received request with id: %d", request_id);
5279 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5280 LOGE("Unable to find request id field, \
5281 & no previous id available");
5282 pthread_mutex_unlock(&mMutex);
5283 return NAME_NOT_FOUND;
5284 } else {
5285 LOGD("Re-using old request id");
5286 request_id = mCurrentRequestId;
5287 }
5288
5289 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5290 request->num_output_buffers,
5291 request->input_buffer,
5292 frameNumber);
5293 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005294 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005296 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 uint32_t snapshotStreamId = 0;
5298 for (size_t i = 0; i < request->num_output_buffers; i++) {
5299 const camera3_stream_buffer_t& output = request->output_buffers[i];
5300 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5301
Emilian Peev7650c122017-01-19 08:24:33 -08005302 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5303 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005304 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 blob_request = 1;
5306 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5307 }
5308
5309 if (output.acquire_fence != -1) {
5310 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5311 close(output.acquire_fence);
5312 if (rc != OK) {
5313 LOGE("sync wait failed %d", rc);
5314 pthread_mutex_unlock(&mMutex);
5315 return rc;
5316 }
5317 }
5318
Emilian Peev0f3c3162017-03-15 12:57:46 +00005319 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5320 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005321 depthRequestPresent = true;
5322 continue;
5323 }
5324
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005325 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005326 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005327
5328 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5329 isVidBufRequested = true;
5330 }
5331 }
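    // streamsArray collects the stream IDs that need a buffer for this frame
    // (internally requested streams are appended just below); it is later set
    // into the request parameters as CAM_INTF_META_STREAM_ID so the backend
    // knows which streams to service. Depth-only BLOB buffers are skipped
    // above because depth output is delivered via the depth channel's
    // mapBuffer() path instead.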
5332
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005333    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5334 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5335 itr++) {
5336 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5337 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5338 channel->getStreamID(channel->getStreamTypeMask());
5339
5340 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5341 isVidBufRequested = true;
5342 }
5343 }
5344
Thierry Strudel3d639192016-09-09 11:52:26 -07005345 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005346 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005347 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005348 }
5349 if (blob_request && mRawDumpChannel) {
5350 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005351 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005352 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005353 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005354 }
5355
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005356 {
5357 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5358 // Request a RAW buffer if
5359 // 1. mHdrPlusRawSrcChannel is valid.
5360 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5361 // 3. There is no pending HDR+ request.
5362 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5363 mHdrPlusPendingRequests.size() == 0) {
5364 streamsArray.stream_request[streamsArray.num_streams].streamID =
5365 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5366 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5367 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005368 }
5369
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005370 //extract capture intent
5371 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5372 mCaptureIntent =
5373 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5374 }
5375
5376 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5377 mCacMode =
5378 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5379 }
5380
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005381 uint8_t requestedLensShadingMapMode;
5382 // Get the shading map mode.
5383 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5384 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5385 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5386 } else {
5387 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5388 }
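    // Capture settings are sticky: when a request omits
    // ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, the last explicitly requested
    // mode (cached in mLastRequestedLensShadingMapMode) is carried over.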
5389
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005391 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005392
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005393 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005394 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005395 // If this request has a still capture intent, try to submit an HDR+ request.
5396 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5397 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5398 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5399 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005400 }
5401
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005402 if (hdrPlusRequest) {
5403 // For a HDR+ request, just set the frame parameters.
5404 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5405 if (rc < 0) {
5406 LOGE("fail to set frame parameters");
5407 pthread_mutex_unlock(&mMutex);
5408 return rc;
5409 }
5410 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005411 /* Parse the settings:
5412 * - For every request in NORMAL MODE
5413 * - For every request in HFR mode during preview only case
5414 * - For first request of every batch in HFR mode during video
5415 * recording. In batchmode the same settings except frame number is
5416 * repeated in each request of the batch.
5417 */
5418 if (!mBatchSize ||
5419 (mBatchSize && !isVidBufRequested) ||
5420 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005421 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005422 if (rc < 0) {
5423 LOGE("fail to set frame parameters");
5424 pthread_mutex_unlock(&mMutex);
5425 return rc;
5426 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005427
5428 {
5429 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5430 // will be reported in result metadata.
5431 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5432 if (mHdrPlusModeEnabled) {
5433 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5434 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5435 }
5436 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005437 }
5438 /* For batchMode HFR, setFrameParameters is not called for every
5439 * request. But only frame number of the latest request is parsed.
5440 * Keep track of first and last frame numbers in a batch so that
5441 * metadata for the frame numbers of batch can be duplicated in
5442         * handleBatchMetadata */
5443 if (mBatchSize) {
5444 if (!mToBeQueuedVidBufs) {
5445 //start of the batch
5446 mFirstFrameNumberInBatch = request->frame_number;
5447 }
5448 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5449 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5450 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005451 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 return BAD_VALUE;
5453 }
5454 }
5455 if (mNeedSensorRestart) {
5456 /* Unlock the mutex as restartSensor waits on the channels to be
5457 * stopped, which in turn calls stream callback functions -
5458 * handleBufferWithLock and handleMetadataWithLock */
5459 pthread_mutex_unlock(&mMutex);
5460 rc = dynamicUpdateMetaStreamInfo();
5461 if (rc != NO_ERROR) {
5462 LOGE("Restarting the sensor failed");
5463 return BAD_VALUE;
5464 }
5465 mNeedSensorRestart = false;
5466 pthread_mutex_lock(&mMutex);
5467 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005468 if(mResetInstantAEC) {
5469 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5470 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5471 mResetInstantAEC = false;
5472 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005473 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (request->input_buffer->acquire_fence != -1) {
5475 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5476 close(request->input_buffer->acquire_fence);
5477 if (rc != OK) {
5478 LOGE("input buffer sync wait failed %d", rc);
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
5482 }
5483 }
5484
5485 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5486 mLastCustIntentFrmNum = frameNumber;
5487 }
5488 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005489 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 pendingRequestIterator latestRequest;
5491 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005492 pendingRequest.num_buffers = depthRequestPresent ?
5493 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 pendingRequest.request_id = request_id;
5495 pendingRequest.blob_request = blob_request;
5496 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005497 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005498 if (request->input_buffer) {
5499 pendingRequest.input_buffer =
5500 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5501 *(pendingRequest.input_buffer) = *(request->input_buffer);
5502 pInputBuffer = pendingRequest.input_buffer;
5503 } else {
5504 pendingRequest.input_buffer = NULL;
5505 pInputBuffer = NULL;
5506 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005507 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005508
5509 pendingRequest.pipeline_depth = 0;
5510 pendingRequest.partial_result_cnt = 0;
5511 extractJpegMetadata(mCurJpegMeta, request);
5512 pendingRequest.jpegMetadata = mCurJpegMeta;
5513 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005514 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005515 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5516 mHybridAeEnable =
5517 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5518 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005519
5520 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5521 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005522 /* DevCamDebug metadata processCaptureRequest */
5523 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5524 mDevCamDebugMetaEnable =
5525 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5526 }
5527 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5528 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005529
5530 //extract CAC info
5531 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5532 mCacMode =
5533 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5534 }
5535 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005536 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005537 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5538 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005539
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005540 // extract enableZsl info
5541 if (gExposeEnableZslKey) {
5542 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5543 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5544 mZslEnabled = pendingRequest.enableZsl;
5545 } else {
5546 pendingRequest.enableZsl = mZslEnabled;
5547 }
5548 }
5549
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 PendingBuffersInRequest bufsForCurRequest;
5551 bufsForCurRequest.frame_number = frameNumber;
5552 // Mark current timestamp for the new request
5553 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005554 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005555
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005556 if (hdrPlusRequest) {
5557 // Save settings for this request.
5558 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5559 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5560
5561 // Add to pending HDR+ request queue.
5562 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5563 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5564
5565 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5566 }
5567
Thierry Strudel3d639192016-09-09 11:52:26 -07005568 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005569 if ((request->output_buffers[i].stream->data_space ==
5570 HAL_DATASPACE_DEPTH) &&
5571 (HAL_PIXEL_FORMAT_BLOB ==
5572 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005573 continue;
5574 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005575 RequestedBufferInfo requestedBuf;
5576 memset(&requestedBuf, 0, sizeof(requestedBuf));
5577 requestedBuf.stream = request->output_buffers[i].stream;
5578 requestedBuf.buffer = NULL;
5579 pendingRequest.buffers.push_back(requestedBuf);
5580
5581 // Add to buffer handle the pending buffers list
5582 PendingBufferInfo bufferInfo;
5583 bufferInfo.buffer = request->output_buffers[i].buffer;
5584 bufferInfo.stream = request->output_buffers[i].stream;
5585 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5586 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5587 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5588 frameNumber, bufferInfo.buffer,
5589 channel->getStreamTypeMask(), bufferInfo.stream->format);
5590 }
5591 // Add this request packet into mPendingBuffersMap
5592 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5593 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5594 mPendingBuffersMap.get_num_overall_buffers());
5595
5596 latestRequest = mPendingRequestsList.insert(
5597 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005598
5599 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5600 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005601 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005602 for (size_t i = 0; i < request->num_output_buffers; i++) {
5603 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5604 }
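    // The shutter and output-buffer dispatchers track which frame numbers still
    // owe a shutter notify / buffer return, so results can be delivered back to
    // the framework in frame-number order (presumably holding later frames
    // until earlier ones have completed).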
5605
Thierry Strudel3d639192016-09-09 11:52:26 -07005606 if(mFlush) {
5607 LOGI("mFlush is true");
5608 pthread_mutex_unlock(&mMutex);
5609 return NO_ERROR;
5610 }
5611
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005612 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5613 // channel.
5614 if (!hdrPlusRequest) {
5615 int indexUsed;
5616 // Notify metadata channel we receive a request
5617 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005618
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005619 if(request->input_buffer != NULL){
5620 LOGD("Input request, frame_number %d", frameNumber);
5621 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5622 if (NO_ERROR != rc) {
5623 LOGE("fail to set reproc parameters");
5624 pthread_mutex_unlock(&mMutex);
5625 return rc;
5626 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005627 }
5628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005629 // Call request on other streams
5630 uint32_t streams_need_metadata = 0;
5631 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5632 for (size_t i = 0; i < request->num_output_buffers; i++) {
5633 const camera3_stream_buffer_t& output = request->output_buffers[i];
5634 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5635
5636 if (channel == NULL) {
5637 LOGW("invalid channel pointer for stream");
5638 continue;
5639 }
5640
5641 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5642 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5643 output.buffer, request->input_buffer, frameNumber);
5644 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005645 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5647 if (rc < 0) {
5648 LOGE("Fail to request on picture channel");
5649 pthread_mutex_unlock(&mMutex);
5650 return rc;
5651 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005652 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005653 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5654 assert(NULL != mDepthChannel);
5655 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005656
Emilian Peev7650c122017-01-19 08:24:33 -08005657 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5658 if (rc < 0) {
5659 LOGE("Fail to map on depth buffer");
5660 pthread_mutex_unlock(&mMutex);
5661 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005662 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005663 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005664 } else {
5665 LOGD("snapshot request with buffer %p, frame_number %d",
5666 output.buffer, frameNumber);
5667 if (!request->settings) {
5668 rc = channel->request(output.buffer, frameNumber,
5669 NULL, mPrevParameters, indexUsed);
5670 } else {
5671 rc = channel->request(output.buffer, frameNumber,
5672 NULL, mParameters, indexUsed);
5673 }
5674 if (rc < 0) {
5675 LOGE("Fail to request on picture channel");
5676 pthread_mutex_unlock(&mMutex);
5677 return rc;
5678 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005679
Emilian Peev7650c122017-01-19 08:24:33 -08005680 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5681 uint32_t j = 0;
5682 for (j = 0; j < streamsArray.num_streams; j++) {
5683 if (streamsArray.stream_request[j].streamID == streamId) {
5684 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5685 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5686 else
5687 streamsArray.stream_request[j].buf_index = indexUsed;
5688 break;
5689 }
5690 }
5691 if (j == streamsArray.num_streams) {
5692 LOGE("Did not find matching stream to update index");
5693 assert(0);
5694 }
5695
5696 pendingBufferIter->need_metadata = true;
5697 streams_need_metadata++;
5698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005700 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5701 bool needMetadata = false;
5702 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5703 rc = yuvChannel->request(output.buffer, frameNumber,
5704 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5705 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005706 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005708 pthread_mutex_unlock(&mMutex);
5709 return rc;
5710 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005711
5712 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5713 uint32_t j = 0;
5714 for (j = 0; j < streamsArray.num_streams; j++) {
5715 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005716 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5717 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5718 else
5719 streamsArray.stream_request[j].buf_index = indexUsed;
5720 break;
5721 }
5722 }
5723 if (j == streamsArray.num_streams) {
5724 LOGE("Did not find matching stream to update index");
5725 assert(0);
5726 }
5727
5728 pendingBufferIter->need_metadata = needMetadata;
5729 if (needMetadata)
5730 streams_need_metadata += 1;
5731 LOGD("calling YUV channel request, need_metadata is %d",
5732 needMetadata);
5733 } else {
5734 LOGD("request with buffer %p, frame_number %d",
5735 output.buffer, frameNumber);
5736
5737 rc = channel->request(output.buffer, frameNumber, indexUsed);
5738
5739 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5740 uint32_t j = 0;
5741 for (j = 0; j < streamsArray.num_streams; j++) {
5742 if (streamsArray.stream_request[j].streamID == streamId) {
5743 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5744 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5745 else
5746 streamsArray.stream_request[j].buf_index = indexUsed;
5747 break;
5748 }
5749 }
5750 if (j == streamsArray.num_streams) {
5751 LOGE("Did not find matching stream to update index");
5752 assert(0);
5753 }
5754
5755 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5756 && mBatchSize) {
5757 mToBeQueuedVidBufs++;
5758 if (mToBeQueuedVidBufs == mBatchSize) {
5759 channel->queueBatchBuf();
5760 }
5761 }
5762 if (rc < 0) {
5763 LOGE("request failed");
5764 pthread_mutex_unlock(&mMutex);
5765 return rc;
5766 }
5767 }
5768 pendingBufferIter++;
5769 }
5770
5771 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5772 itr++) {
5773 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5774
5775 if (channel == NULL) {
5776 LOGE("invalid channel pointer for stream");
5777 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005778 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 return BAD_VALUE;
5780 }
5781
5782 InternalRequest requestedStream;
5783 requestedStream = (*itr);
5784
5785
5786 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5787 LOGD("snapshot request internally input buffer %p, frame_number %d",
5788 request->input_buffer, frameNumber);
5789 if(request->input_buffer != NULL){
5790 rc = channel->request(NULL, frameNumber,
5791 pInputBuffer, &mReprocMeta, indexUsed, true,
5792 requestedStream.meteringOnly);
5793 if (rc < 0) {
5794 LOGE("Fail to request on picture channel");
5795 pthread_mutex_unlock(&mMutex);
5796 return rc;
5797 }
5798 } else {
5799 LOGD("snapshot request with frame_number %d", frameNumber);
5800 if (!request->settings) {
5801 rc = channel->request(NULL, frameNumber,
5802 NULL, mPrevParameters, indexUsed, true,
5803 requestedStream.meteringOnly);
5804 } else {
5805 rc = channel->request(NULL, frameNumber,
5806 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5807 }
5808 if (rc < 0) {
5809 LOGE("Fail to request on picture channel");
5810 pthread_mutex_unlock(&mMutex);
5811 return rc;
5812 }
5813
5814 if ((*itr).meteringOnly != 1) {
5815 requestedStream.need_metadata = 1;
5816 streams_need_metadata++;
5817 }
5818 }
5819
5820 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5821 uint32_t j = 0;
5822 for (j = 0; j < streamsArray.num_streams; j++) {
5823 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005824 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5825 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5826 else
5827 streamsArray.stream_request[j].buf_index = indexUsed;
5828 break;
5829 }
5830 }
5831 if (j == streamsArray.num_streams) {
5832 LOGE("Did not find matching stream to update index");
5833 assert(0);
5834 }
5835
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005836 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005838 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005839 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005840 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005841 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005842 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005843 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005844
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005845 //If 2 streams have need_metadata set to true, fail the request, unless
5846 //we copy/reference count the metadata buffer
5847 if (streams_need_metadata > 1) {
5848            LOGE("not supporting request in which two streams require"
5849 " 2 HAL metadata for reprocessing");
5850 pthread_mutex_unlock(&mMutex);
5851 return -EINVAL;
5852 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005853
Emilian Peev656e4fa2017-06-02 16:47:04 +01005854 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5855 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5856 if (depthRequestPresent && mDepthChannel) {
5857 if (request->settings) {
5858 camera_metadata_ro_entry entry;
5859 if (find_camera_metadata_ro_entry(request->settings,
5860 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5861 if (entry.data.u8[0]) {
5862 pdafEnable = CAM_PD_DATA_ENABLED;
5863 } else {
5864 pdafEnable = CAM_PD_DATA_SKIP;
5865 }
5866 mDepthCloudMode = pdafEnable;
5867 } else {
5868 pdafEnable = mDepthCloudMode;
5869 }
5870 } else {
5871 pdafEnable = mDepthCloudMode;
5872 }
5873 }
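    // PDAF data selection (rough summary): with no depth channel PD data is
    // disabled outright; with a depth channel but no depth buffer in this
    // request it is skipped for the frame; when a depth buffer is requested,
    // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE (or the cached mDepthCloudMode
    // from an earlier request) decides between enabled and skipped.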
5874
Emilian Peev7650c122017-01-19 08:24:33 -08005875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5876 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5877 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5878 pthread_mutex_unlock(&mMutex);
5879 return BAD_VALUE;
5880 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005881
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005882 if (request->input_buffer == NULL) {
5883 /* Set the parameters to backend:
5884 * - For every request in NORMAL MODE
5885 * - For every request in HFR mode during preview only case
5886 * - Once every batch in HFR mode during video recording
5887 */
5888 if (!mBatchSize ||
5889 (mBatchSize && !isVidBufRequested) ||
5890 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5891 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5892 mBatchSize, isVidBufRequested,
5893 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005895 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5896 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5897 uint32_t m = 0;
5898 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5899 if (streamsArray.stream_request[k].streamID ==
5900 mBatchedStreamsArray.stream_request[m].streamID)
5901 break;
5902 }
5903 if (m == mBatchedStreamsArray.num_streams) {
5904 mBatchedStreamsArray.stream_request\
5905 [mBatchedStreamsArray.num_streams].streamID =
5906 streamsArray.stream_request[k].streamID;
5907 mBatchedStreamsArray.stream_request\
5908 [mBatchedStreamsArray.num_streams].buf_index =
5909 streamsArray.stream_request[k].buf_index;
5910 mBatchedStreamsArray.num_streams =
5911 mBatchedStreamsArray.num_streams + 1;
5912 }
5913 }
5914 streamsArray = mBatchedStreamsArray;
5915 }
5916 /* Update stream id of all the requested buffers */
5917 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5918 streamsArray)) {
5919 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005920 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005921 return BAD_VALUE;
5922 }
5923
5924 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5925 mParameters);
5926 if (rc < 0) {
5927 LOGE("set_parms failed");
5928 }
5929            /* reset to zero because the batch is queued */
5930 mToBeQueuedVidBufs = 0;
5931 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5932 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5933 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005934 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5935 uint32_t m = 0;
5936 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5937 if (streamsArray.stream_request[k].streamID ==
5938 mBatchedStreamsArray.stream_request[m].streamID)
5939 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005940 }
5941 if (m == mBatchedStreamsArray.num_streams) {
5942 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5943 streamID = streamsArray.stream_request[k].streamID;
5944 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5945 buf_index = streamsArray.stream_request[k].buf_index;
5946 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5947 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005948 }
5949 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005950 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005951
5952 // Start all streams after the first setting is sent, so that the
5953 // setting can be applied sooner: (0 + apply_delay)th frame.
5954 if (mState == CONFIGURED && mChannelHandle) {
5955 //Then start them.
5956 LOGH("Start META Channel");
5957 rc = mMetadataChannel->start();
5958 if (rc < 0) {
5959 LOGE("META channel start failed");
5960 pthread_mutex_unlock(&mMutex);
5961 return rc;
5962 }
5963
5964 if (mAnalysisChannel) {
5965 rc = mAnalysisChannel->start();
5966 if (rc < 0) {
5967 LOGE("Analysis channel start failed");
5968 mMetadataChannel->stop();
5969 pthread_mutex_unlock(&mMutex);
5970 return rc;
5971 }
5972 }
5973
5974 if (mSupportChannel) {
5975 rc = mSupportChannel->start();
5976 if (rc < 0) {
5977 LOGE("Support channel start failed");
5978 mMetadataChannel->stop();
5979                    /* Although support and analysis are mutually exclusive today,
5980                       adding it in any case for future-proofing */
5981 if (mAnalysisChannel) {
5982 mAnalysisChannel->stop();
5983 }
5984 pthread_mutex_unlock(&mMutex);
5985 return rc;
5986 }
5987 }
5988 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5989 it != mStreamInfo.end(); it++) {
5990 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5991 LOGH("Start Processing Channel mask=%d",
5992 channel->getStreamTypeMask());
5993 rc = channel->start();
5994 if (rc < 0) {
5995 LOGE("channel start failed");
5996 pthread_mutex_unlock(&mMutex);
5997 return rc;
5998 }
5999 }
6000
6001 if (mRawDumpChannel) {
6002 LOGD("Starting raw dump stream");
6003 rc = mRawDumpChannel->start();
6004 if (rc != NO_ERROR) {
6005 LOGE("Error Starting Raw Dump Channel");
6006 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6007 it != mStreamInfo.end(); it++) {
6008 QCamera3Channel *channel =
6009 (QCamera3Channel *)(*it)->stream->priv;
6010 LOGH("Stopping Processing Channel mask=%d",
6011 channel->getStreamTypeMask());
6012 channel->stop();
6013 }
6014 if (mSupportChannel)
6015 mSupportChannel->stop();
6016 if (mAnalysisChannel) {
6017 mAnalysisChannel->stop();
6018 }
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006025 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006026 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006027 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006028 if (rc != NO_ERROR) {
6029 LOGE("start_channel failed %d", rc);
6030 pthread_mutex_unlock(&mMutex);
6031 return rc;
6032 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006033
6034 {
6035 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006036 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006037
6038 // Now that sensor mode should have been selected, get the selected sensor mode
6039 // info.
6040 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6041 getCurrentSensorModeInfo(mSensorModeInfo);
6042
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006043 if (EaselManagerClientOpened) {
6044 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006045 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6046 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006047 if (rc != OK) {
6048 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6049 mCameraId, mSensorModeInfo.op_pixel_clk);
6050 pthread_mutex_unlock(&mMutex);
6051 return rc;
6052 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006053 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006054 }
6055 }
6056
6057 // Start sensor streaming.
6058 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6059 mChannelHandle);
6060 if (rc != NO_ERROR) {
6061 LOGE("start_sensor_stream_on failed %d", rc);
6062 pthread_mutex_unlock(&mMutex);
6063 return rc;
6064 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006065 }
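            // Stream-on sequence for the first request after configure:
            // metadata, analysis, support and processing channels are started,
            // start_channel() then configures the backend without streaming the
            // sensor, Easel MIPI is brought up for the selected sensor mode,
            // and only then is sensor streaming started. This ordering lets the
            // first request's settings take effect from the earliest frame
            // (see the "0 + apply_delay" note above).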
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006067 }
6068
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006069 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006070 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006071 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006072 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006073 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6074 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6075 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6076 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006077
6078 if (isSessionHdrPlusModeCompatible()) {
6079 rc = enableHdrPlusModeLocked();
6080 if (rc != OK) {
6081 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6082 pthread_mutex_unlock(&mMutex);
6083 return rc;
6084 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006085 }
6086
6087 mFirstPreviewIntentSeen = true;
6088 }
6089 }
6090
Thierry Strudel3d639192016-09-09 11:52:26 -07006091 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6092
6093 mState = STARTED;
6094 // Added a timed condition wait
6095 struct timespec ts;
6096 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006097 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006098 if (rc < 0) {
6099 isValidTimeout = 0;
6100        LOGE("Error reading the monotonic clock!!");
6101 }
6102 else {
6103 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006104 int64_t timeout = 5;
6105 {
6106 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6107 // If there is a pending HDR+ request, the following requests may be blocked until the
6108 // HDR+ request is done. So allow a longer timeout.
6109 if (mHdrPlusPendingRequests.size() > 0) {
6110 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6111 }
6112 }
6113 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006114 }
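    // Flow control: the wait below blocks process_capture_request() while
    // mPendingLiveRequest >= mMinInFlightRequests and no input buffer is
    // attached; it is woken via mRequestCond as results come back, and when
    // woken by the daemon it proceeds only once mPendingLiveRequest drops below
    // mMaxInFlightRequests. A timeout (5s, or longer while an HDR+ request is
    // pending) is treated as a device error (-ENODEV).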
6115 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006116 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006117 (mState != ERROR) && (mState != DEINIT)) {
6118 if (!isValidTimeout) {
6119 LOGD("Blocking on conditional wait");
6120 pthread_cond_wait(&mRequestCond, &mMutex);
6121 }
6122 else {
6123 LOGD("Blocking on timed conditional wait");
6124 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6125 if (rc == ETIMEDOUT) {
6126 rc = -ENODEV;
6127 LOGE("Unblocked on timeout!!!!");
6128 break;
6129 }
6130 }
6131 LOGD("Unblocked");
6132 if (mWokenUpByDaemon) {
6133 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006134 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006135 break;
6136 }
6137 }
6138 pthread_mutex_unlock(&mMutex);
6139
6140 return rc;
6141}
6142
6143/*===========================================================================
6144 * FUNCTION : dump
6145 *
6146 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and
6147 *              pending frame drops) to the given file descriptor; also
6148 *              acts as the dumpsys media.camera trigger to refresh the debug level.
6149 * PARAMETERS :
6150 *   @fd : file descriptor to write the dump to
6151 * RETURN : None
6152 *==========================================================================*/
6153void QCamera3HardwareInterface::dump(int fd)
6154{
6155 pthread_mutex_lock(&mMutex);
6156 dprintf(fd, "\n Camera HAL3 information Begin \n");
6157
6158 dprintf(fd, "\nNumber of pending requests: %zu \n",
6159 mPendingRequestsList.size());
6160 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6161 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6162 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6163 for(pendingRequestIterator i = mPendingRequestsList.begin();
6164 i != mPendingRequestsList.end(); i++) {
6165 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6166 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6167 i->input_buffer);
6168 }
6169 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6170 mPendingBuffersMap.get_num_overall_buffers());
6171 dprintf(fd, "-------+------------------\n");
6172 dprintf(fd, " Frame | Stream type mask \n");
6173 dprintf(fd, "-------+------------------\n");
6174 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6175 for(auto &j : req.mPendingBufferList) {
6176 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6177 dprintf(fd, " %5d | %11d \n",
6178 req.frame_number, channel->getStreamTypeMask());
6179 }
6180 }
6181 dprintf(fd, "-------+------------------\n");
6182
6183 dprintf(fd, "\nPending frame drop list: %zu\n",
6184 mPendingFrameDropList.size());
6185 dprintf(fd, "-------+-----------\n");
6186 dprintf(fd, " Frame | Stream ID \n");
6187 dprintf(fd, "-------+-----------\n");
6188 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6189 i != mPendingFrameDropList.end(); i++) {
6190 dprintf(fd, " %5d | %9d \n",
6191 i->frame_number, i->stream_ID);
6192 }
6193 dprintf(fd, "-------+-----------\n");
6194
6195 dprintf(fd, "\n Camera HAL3 information End \n");
6196
6197 /* use dumpsys media.camera as trigger to send update debug level event */
6198 mUpdateDebugLevel = true;
6199 pthread_mutex_unlock(&mMutex);
6200 return;
6201}
6202
6203/*===========================================================================
6204 * FUNCTION : flush
6205 *
6206 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6207 * conditionally restarts channels
6208 *
6209 * PARAMETERS :
6210 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006211 * @ stopChannelImmediately: stop the channel immediately. This should be used
6212 *                            when the device has encountered an error and MIPI may have
6213 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 *
6215 * RETURN :
6216 * 0 on success
6217 * Error code on failure
6218 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006219int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006220{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006221 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006222 int32_t rc = NO_ERROR;
6223
6224 LOGD("Unblocking Process Capture Request");
6225 pthread_mutex_lock(&mMutex);
6226 mFlush = true;
6227 pthread_mutex_unlock(&mMutex);
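    // Setting mFlush under mMutex lets any concurrent process_capture_request()
    // notice the flush (it checks mFlush and returns early) while the channels
    // are stopped below.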
6228
6229 rc = stopAllChannels();
6230 // unlink of dualcam
6231 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006232 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6233 &m_pDualCamCmdPtr->bundle_info;
6234 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006235 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6236 pthread_mutex_lock(&gCamLock);
6237
6238 if (mIsMainCamera == 1) {
6239 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6240 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006241 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006242 // related session id should be session id of linked session
6243 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6244 } else {
6245 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6246 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006247 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006248 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6249 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006250 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 pthread_mutex_unlock(&gCamLock);
6252
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006253 rc = mCameraHandle->ops->set_dual_cam_cmd(
6254 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006255 if (rc < 0) {
6256 LOGE("Dualcam: Unlink failed, but still proceed to close");
6257 }
6258 }
6259
6260 if (rc < 0) {
6261 LOGE("stopAllChannels failed");
6262 return rc;
6263 }
6264 if (mChannelHandle) {
6265 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006266 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006267 }
6268
6269 // Reset bundle info
6270 rc = setBundleInfo();
6271 if (rc < 0) {
6272 LOGE("setBundleInfo failed %d", rc);
6273 return rc;
6274 }
6275
6276 // Mutex Lock
6277 pthread_mutex_lock(&mMutex);
6278
6279 // Unblock process_capture_request
6280 mPendingLiveRequest = 0;
6281 pthread_cond_signal(&mRequestCond);
6282
6283 rc = notifyErrorForPendingRequests();
6284 if (rc < 0) {
6285 LOGE("notifyErrorForPendingRequests failed");
6286 pthread_mutex_unlock(&mMutex);
6287 return rc;
6288 }
6289
6290 mFlush = false;
6291
6292 // Start the Streams/Channels
6293 if (restartChannels) {
6294 rc = startAllChannels();
6295 if (rc < 0) {
6296 LOGE("startAllChannels failed");
6297 pthread_mutex_unlock(&mMutex);
6298 return rc;
6299 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006300 if (mChannelHandle) {
6301 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006302 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006303 if (rc < 0) {
6304 LOGE("start_channel failed");
6305 pthread_mutex_unlock(&mMutex);
6306 return rc;
6307 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006308 }
6309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006310 pthread_mutex_unlock(&mMutex);
6311
6312 return 0;
6313}
6314
6315/*===========================================================================
6316 * FUNCTION : flushPerf
6317 *
6318 * DESCRIPTION: This is the performance optimization version of flush that does
6319 *              not use stream off; instead it flushes the backend and waits for pending buffers to return
6320 *
6321 * PARAMETERS :
6322 *
6323 *
6324 * RETURN : 0 : success
6325 * -EINVAL: input is malformed (device is not valid)
6326 * -ENODEV: if the device has encountered a serious error
6327 *==========================================================================*/
6328int QCamera3HardwareInterface::flushPerf()
6329{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006330 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006331 int32_t rc = 0;
6332 struct timespec timeout;
6333 bool timed_wait = false;
6334
6335 pthread_mutex_lock(&mMutex);
6336 mFlushPerf = true;
6337 mPendingBuffersMap.numPendingBufsAtFlush =
6338 mPendingBuffersMap.get_num_overall_buffers();
6339 LOGD("Calling flush. Wait for %d buffers to return",
6340 mPendingBuffersMap.numPendingBufsAtFlush);
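    // Snapshot the number of outstanding buffers before issuing the backend
    // flush; the wait loop below returns once buffer returns have driven
    // numPendingBufsAtFlush back to zero (signalled via mBuffersCond).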
6341
6342 /* send the flush event to the backend */
6343 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6344 if (rc < 0) {
6345 LOGE("Error in flush: IOCTL failure");
6346 mFlushPerf = false;
6347 pthread_mutex_unlock(&mMutex);
6348 return -ENODEV;
6349 }
6350
6351 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6352 LOGD("No pending buffers in HAL, return flush");
6353 mFlushPerf = false;
6354 pthread_mutex_unlock(&mMutex);
6355 return rc;
6356 }
6357
6358 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006359 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006360 if (rc < 0) {
6361        LOGE("Error reading the monotonic clock, cannot use timed wait");
6362 } else {
6363 timeout.tv_sec += FLUSH_TIMEOUT;
6364 timed_wait = true;
6365 }
6366
6367 //Block on conditional variable
6368 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6369 LOGD("Waiting on mBuffersCond");
6370 if (!timed_wait) {
6371 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6372 if (rc != 0) {
6373 LOGE("pthread_cond_wait failed due to rc = %s",
6374 strerror(rc));
6375 break;
6376 }
6377 } else {
6378 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6379 if (rc != 0) {
6380 LOGE("pthread_cond_timedwait failed due to rc = %s",
6381 strerror(rc));
6382 break;
6383 }
6384 }
6385 }
6386 if (rc != 0) {
6387 mFlushPerf = false;
6388 pthread_mutex_unlock(&mMutex);
6389 return -ENODEV;
6390 }
6391
6392 LOGD("Received buffers, now safe to return them");
6393
6394 //make sure the channels handle flush
6395 //currently only required for the picture channel to release snapshot resources
6396 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6397 it != mStreamInfo.end(); it++) {
6398 QCamera3Channel *channel = (*it)->channel;
6399 if (channel) {
6400 rc = channel->flush();
6401 if (rc) {
6402 LOGE("Flushing the channels failed with error %d", rc);
6403 // even though the channel flush failed we need to continue and
6404 // return the buffers we have to the framework, however the return
6405 // value will be an error
6406 rc = -ENODEV;
6407 }
6408 }
6409 }
6410
6411 /* notify the frameworks and send errored results */
6412 rc = notifyErrorForPendingRequests();
6413 if (rc < 0) {
6414 LOGE("notifyErrorForPendingRequests failed");
6415 pthread_mutex_unlock(&mMutex);
6416 return rc;
6417 }
6418
6419 //unblock process_capture_request
6420 mPendingLiveRequest = 0;
6421 unblockRequestIfNecessary();
6422
6423 mFlushPerf = false;
6424 pthread_mutex_unlock(&mMutex);
6425 LOGD ("Flush Operation complete. rc = %d", rc);
6426 return rc;
6427}
6428
6429/*===========================================================================
6430 * FUNCTION : handleCameraDeviceError
6431 *
6432 * DESCRIPTION: This function calls internal flush and notifies the error to
6433 * framework and updates the state variable.
6434 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006435 * PARAMETERS :
6436 * @stopChannelImmediately : stop channels immediately without waiting for
6437 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006438 *
6439 * RETURN : NO_ERROR on Success
6440 * Error code on failure
6441 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006442int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006443{
6444 int32_t rc = NO_ERROR;
6445
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006446 {
6447 Mutex::Autolock lock(mFlushLock);
6448 pthread_mutex_lock(&mMutex);
6449 if (mState != ERROR) {
6450 //if mState != ERROR, nothing to be done
6451 pthread_mutex_unlock(&mMutex);
6452 return NO_ERROR;
6453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006454 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006455
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006456 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006457 if (NO_ERROR != rc) {
6458 LOGE("internal flush to handle mState = ERROR failed");
6459 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006460
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006461 pthread_mutex_lock(&mMutex);
6462 mState = DEINIT;
6463 pthread_mutex_unlock(&mMutex);
6464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006465
6466 camera3_notify_msg_t notify_msg;
6467 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6468 notify_msg.type = CAMERA3_MSG_ERROR;
6469 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6470 notify_msg.message.error.error_stream = NULL;
6471 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006472 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006473
6474 return rc;
6475}
6476
6477/*===========================================================================
6478 * FUNCTION : captureResultCb
6479 *
6480 * DESCRIPTION: Callback handler for all capture results
6481 * (streams, as well as metadata)
6482 *
6483 * PARAMETERS :
6484 * @metadata : metadata information
6485 * @buffer : actual gralloc buffer to be returned to frameworks.
6486 * NULL if metadata.
6487 *
6488 * RETURN : NONE
6489 *==========================================================================*/
6490void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6491 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6492{
6493 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006494 pthread_mutex_lock(&mMutex);
6495 uint8_t batchSize = mBatchSize;
6496 pthread_mutex_unlock(&mMutex);
6497 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006498 handleBatchMetadata(metadata_buf,
6499 true /* free_and_bufdone_meta_buf */);
6500 } else { /* mBatchSize = 0 */
6501 hdrPlusPerfLock(metadata_buf);
6502 pthread_mutex_lock(&mMutex);
6503 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006504 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006505 true /* last urgent frame of batch metadata */,
6506 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006507 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006508 pthread_mutex_unlock(&mMutex);
6509 }
6510 } else if (isInputBuffer) {
6511 pthread_mutex_lock(&mMutex);
6512 handleInputBufferWithLock(frame_number);
6513 pthread_mutex_unlock(&mMutex);
6514 } else {
6515 pthread_mutex_lock(&mMutex);
6516 handleBufferWithLock(buffer, frame_number);
6517 pthread_mutex_unlock(&mMutex);
6518 }
6519 return;
6520}
6521
6522/*===========================================================================
6523 * FUNCTION : getReprocessibleOutputStreamId
6524 *
6525 * DESCRIPTION: Get source output stream id for the input reprocess stream
6526 * based on size and format, which would be the largest
6527 * output stream if an input stream exists.
6528 *
6529 * PARAMETERS :
6530 * @id : return the stream id if found
6531 *
6532 * RETURN : int32_t type of status
6533 * NO_ERROR -- success
6534 *              non-zero failure code
6535 *==========================================================================*/
6536int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6537{
6538    /* Check if there is any output or bidirectional stream with the same size
6539       and format, and return that stream */
6540 if ((mInputStreamInfo.dim.width > 0) &&
6541 (mInputStreamInfo.dim.height > 0)) {
6542 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6543 it != mStreamInfo.end(); it++) {
6544
6545 camera3_stream_t *stream = (*it)->stream;
6546 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6547 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6548 (stream->format == mInputStreamInfo.format)) {
6549 // Usage flag for an input stream and the source output stream
6550 // may be different.
6551 LOGD("Found reprocessible output stream! %p", *it);
6552 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6553 stream->usage, mInputStreamInfo.usage);
6554
6555 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6556 if (channel != NULL && channel->mStreams[0]) {
6557 id = channel->mStreams[0]->getMyServerID();
6558 return NO_ERROR;
6559 }
6560 }
6561 }
6562 } else {
6563 LOGD("No input stream, so no reprocessible output stream");
6564 }
6565 return NAME_NOT_FOUND;
6566}
6567
6568/*===========================================================================
6569 * FUNCTION : lookupFwkName
6570 *
6571 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6572 *              make sure the parameter is correctly propagated
6573 *
6574 * PARAMETERS :
6575 * @arr : map between the two enums
6576 *   @len     : length of the map
6577 * @hal_name : name of the hal_parm to map
6578 *
6579 * RETURN : int type of status
6580 * fwk_name -- success
6581 *              non-zero failure code
6582 *==========================================================================*/
6583template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6584 size_t len, halType hal_name)
6585{
6586
6587 for (size_t i = 0; i < len; i++) {
6588 if (arr[i].hal_name == hal_name) {
6589 return arr[i].fwk_name;
6590 }
6591 }
6592
6593    /* Not being able to find a matching framework type is not necessarily
6594     * an error. This happens when mm-camera supports more attributes
6595     * than the framework does. */
6596 LOGH("Cannot find matching framework type");
6597 return NAME_NOT_FOUND;
6598}
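/*
 * Illustrative usage sketch (not part of the capture flow): the mapping
 * tables defined earlier in this file (e.g. FLASH_MODES_MAP) pair each
 * backend enum with its framework equivalent, so a translation typically
 * looks like the snippet below. CAM_FLASH_MODE_TORCH is only a hypothetical
 * input value here.
 *
 *     int val = lookupFwkName(FLASH_MODES_MAP,
 *             METADATA_MAP_SIZE(FLASH_MODES_MAP), CAM_FLASH_MODE_TORCH);
 *     if (NAME_NOT_FOUND != val) {
 *         uint8_t fwk_flashMode = (uint8_t)val;
 *         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
 *     }
 */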
6599
6600/*===========================================================================
6601 * FUNCTION : lookupHalName
6602 *
6603 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6604 *              make sure the parameter is correctly propagated
6605 *
6606 * PARAMETERS :
6607 * @arr : map between the two enums
6608 *   @len      : length of the map
6609 *   @fwk_name : name of the fwk_parm to map
6610 *
6611 * RETURN : int32_t type of status
6612 * hal_name -- success
6613 *              non-zero failure code
6614 *==========================================================================*/
6615template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6616 size_t len, fwkType fwk_name)
6617{
6618 for (size_t i = 0; i < len; i++) {
6619 if (arr[i].fwk_name == fwk_name) {
6620 return arr[i].hal_name;
6621 }
6622 }
6623
6624 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6625 return NAME_NOT_FOUND;
6626}
6627
6628/*===========================================================================
6629 * FUNCTION : lookupProp
6630 *
6631 * DESCRIPTION: lookup a value by its name
6632 *
6633 * PARAMETERS :
6634 * @arr : map between the two enums
6635 * @len : size of the map
6636 * @name : name to be looked up
6637 *
6638 * RETURN : Value if found
6639 * CAM_CDS_MODE_MAX if not found
6640 *==========================================================================*/
6641template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6642 size_t len, const char *name)
6643{
6644 if (name) {
6645 for (size_t i = 0; i < len; i++) {
6646 if (!strcmp(arr[i].desc, name)) {
6647 return arr[i].val;
6648 }
6649 }
6650 }
6651 return CAM_CDS_MODE_MAX;
6652}
6653
6654/*===========================================================================
6655 * FUNCTION   : translateFromHalMetadata
 *
6656 * DESCRIPTION: Translate the metadata obtained from the backend into the
 *               camera_metadata_t format expected by the framework
6657 *
6658 * PARAMETERS :
6659 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006660 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006661 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006662 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6663 *                       in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the ZSL enable flag for this request; may be NULL
Thierry Strudel3d639192016-09-09 11:52:26 -07006664 *
6665 * RETURN : camera_metadata_t*
6666 * metadata in a format specified by fwk
6667 *==========================================================================*/
6668camera_metadata_t*
6669QCamera3HardwareInterface::translateFromHalMetadata(
6670 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006671 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006672 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006673 bool lastMetadataInBatch,
6674 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006675{
6676 CameraMetadata camMetadata;
6677 camera_metadata_t *resultMetadata;
6678
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006679 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006680 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6681 * Timestamp is needed because it's used for shutter notify calculation.
6682     */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006683 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006684 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006685 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006686 }
6687
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006688 if (pendingRequest.jpegMetadata.entryCount())
6689 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006690
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006691 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6692 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6693 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6694 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6695 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006696 if (mBatchSize == 0) {
6697 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006698 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006700
Samuel Ha68ba5172016-12-15 18:41:12 -08006701 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6702    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006703 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006704 // DevCamDebug metadata translateFromHalMetadata AF
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6706 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6707 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6708 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6709 }
6710 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6711 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6712 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6713 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6714 }
6715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6716 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6717 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6718 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6719 }
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6721 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6722 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6723 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6726 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6727 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6728 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6731 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6732 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6733 *DevCamDebug_af_monitor_pdaf_target_pos;
6734 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6735 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6738 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6739 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6740 *DevCamDebug_af_monitor_pdaf_confidence;
6741 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6742 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6745 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6746 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6747 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6748 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6751 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6753 *DevCamDebug_af_monitor_tof_target_pos;
6754 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6755 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6756 }
6757 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6758 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6759 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6760 *DevCamDebug_af_monitor_tof_confidence;
6761 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6762 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6765 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6766 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6767 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6768 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6771 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6772 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6773 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6774 &fwk_DevCamDebug_af_monitor_type_select, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6777 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6778 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6779 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6780 &fwk_DevCamDebug_af_monitor_refocus, 1);
6781 }
6782 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6783 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6784 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6785 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6786 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6787 }
6788 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6789 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6790 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6791 *DevCamDebug_af_search_pdaf_target_pos;
6792 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6793 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6796 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6797 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6798 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6799 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6800 }
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6802 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6803 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6804 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6805 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6808 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6809 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6810 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6811 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6812 }
6813 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6814 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6815 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6816 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6817 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6818 }
6819 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6820 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6821 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6822 *DevCamDebug_af_search_tof_target_pos;
6823 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6824 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6825 }
6826 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6827 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6828 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6829 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6830 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6831 }
6832 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6833 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6834 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6835 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6836 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6837 }
6838 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6839 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6840 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6841 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6842 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6845 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6846 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6847 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6848 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6849 }
6850 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6851 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6852 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6853 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6854 &fwk_DevCamDebug_af_search_type_select, 1);
6855 }
6856 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6857 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6858 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6859 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6860 &fwk_DevCamDebug_af_search_next_pos, 1);
6861 }
6862 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6863 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6864 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6865 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6866 &fwk_DevCamDebug_af_search_target_pos, 1);
6867 }
6868 // DevCamDebug metadata translateFromHalMetadata AEC
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6870 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6871 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6872 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6873 }
6874 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6875 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6876 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6877 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6878 }
6879 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6880 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6881 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6882 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6885 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6886 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6887 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6888 }
6889 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6890 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6891 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6892 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6893 }
6894 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6895 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6896 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6897 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6898 }
6899 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6900 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6901 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6902 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6903 }
6904 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6905 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6906 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6907 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6908 }
Samuel Ha34229982017-02-17 13:51:11 -08006909 // DevCamDebug metadata translateFromHalMetadata zzHDR
6910 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6911 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6912 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6913 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6914 }
6915 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6916 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006917 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006918 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6919 }
6920 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6921 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6922 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6923 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6924 }
6925 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6926 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006927 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006928 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6929 }
6930 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6931 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6932 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6933 *DevCamDebug_aec_hdr_sensitivity_ratio;
6934 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6935 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6936 }
6937 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6938 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6939 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6940 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6941 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6942 }
6943 // DevCamDebug metadata translateFromHalMetadata ADRC
6944 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6945 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6946 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6947 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6948 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6949 }
6950 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6951 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6952 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6953 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6954 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6955 }
6956 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6957 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6958 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6959 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6960 }
6961 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6962 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6963 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6964 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6965 }
6966 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6967 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6968 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6969 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6970 }
6971 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6972 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6973 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6974 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6975 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006976 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6977 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6978 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6979 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6980 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6981 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6982 }
6983 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6984 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6985 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6986 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6987 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6988 }
6989 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6990 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6991 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6992 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6993 &fwk_DevCamDebug_aec_subject_motion, 1);
6994 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006995 // DevCamDebug metadata translateFromHalMetadata AWB
6996 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6997 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6998 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6999 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7000 }
7001 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7002 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7003 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7004 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7005 }
7006 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7007 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7008 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7009 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7010 }
7011 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7012 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7013 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7014 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7015 }
7016 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7017 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7018 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7019 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7020 }
7021 }
7022 // atrace_end(ATRACE_TAG_ALWAYS);
7023
Thierry Strudel3d639192016-09-09 11:52:26 -07007024 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7025 int64_t fwk_frame_number = *frame_number;
7026 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7027 }
7028
7029 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7030 int32_t fps_range[2];
7031 fps_range[0] = (int32_t)float_range->min_fps;
7032 fps_range[1] = (int32_t)float_range->max_fps;
7033 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7034 fps_range, 2);
7035 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7036 fps_range[0], fps_range[1]);
7037 }
7038
7039 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7040 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7041 }
7042
7043 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7044 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7045 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7046 *sceneMode);
7047 if (NAME_NOT_FOUND != val) {
7048 uint8_t fwkSceneMode = (uint8_t)val;
7049 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7050 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7051 fwkSceneMode);
7052 }
7053 }
7054
7055 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7056 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7057 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7058 }
7059
7060 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7061 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7062 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7066 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7067 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7068 }
7069
7070 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7071 CAM_INTF_META_EDGE_MODE, metadata) {
7072 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7073 }
7074
7075 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7076 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7077 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7078 }
7079
7080 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7081 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7082 }
7083
7084 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7085 if (0 <= *flashState) {
7086 uint8_t fwk_flashState = (uint8_t) *flashState;
7087 if (!gCamCapability[mCameraId]->flash_available) {
7088 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7089 }
7090 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7091 }
7092 }
7093
7094 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7095 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7096 if (NAME_NOT_FOUND != val) {
7097 uint8_t fwk_flashMode = (uint8_t)val;
7098 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7099 }
7100 }
7101
7102 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7103 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7104 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7105 }
7106
7107 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7108 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7109 }
7110
7111 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7112 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7113 }
7114
7115 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7116 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7117 }
7118
7119 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7120 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7121 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7122 }
7123
7124 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7125 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7126 LOGD("fwk_videoStab = %d", fwk_videoStab);
7127 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7128 } else {
7129        // Regardless of whether video stabilization is supported, CTS expects the
7130        // EIS result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7131 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7132 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007133 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007134 }
7135
7136 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7137 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7138 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7139 }
7140
7141 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7142 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7143 }
7144
Thierry Strudel3d639192016-09-09 11:52:26 -07007145 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7146 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007147 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007148
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007149 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7150 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007151
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007152 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007153 blackLevelAppliedPattern->cam_black_level[0],
7154 blackLevelAppliedPattern->cam_black_level[1],
7155 blackLevelAppliedPattern->cam_black_level[2],
7156 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007157 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7158 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007159
7160#ifndef USE_HAL_3_3
7161 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307162        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007163        // depth space (a difference of 4 bits, hence the division by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307164 fwk_blackLevelInd[0] /= 16.0;
7165 fwk_blackLevelInd[1] /= 16.0;
7166 fwk_blackLevelInd[2] /= 16.0;
7167 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007168 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7169 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007170#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007171 }
7172
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007173#ifndef USE_HAL_3_3
7174 // Fixed whitelevel is used by ISP/Sensor
7175 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7176 &gCamCapability[mCameraId]->white_level, 1);
7177#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007178
7179 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7180 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7181 int32_t scalerCropRegion[4];
7182 scalerCropRegion[0] = hScalerCropRegion->left;
7183 scalerCropRegion[1] = hScalerCropRegion->top;
7184 scalerCropRegion[2] = hScalerCropRegion->width;
7185 scalerCropRegion[3] = hScalerCropRegion->height;
7186
7187 // Adjust crop region from sensor output coordinate system to active
7188 // array coordinate system.
7189 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7190 scalerCropRegion[2], scalerCropRegion[3]);
7191
7192 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7193 }
7194
7195 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7196 LOGD("sensorExpTime = %lld", *sensorExpTime);
7197 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7198 }
7199
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007200 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7201 LOGD("expTimeBoost = %f", *expTimeBoost);
7202 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7203 }
7204
Thierry Strudel3d639192016-09-09 11:52:26 -07007205    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7206            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7207        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7208        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7209 }
7210
7211 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7212 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7213 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7214 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7215 sensorRollingShutterSkew, 1);
7216 }
7217
7218 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7219 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7220 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7221
7222 //calculate the noise profile based on sensitivity
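        // Per the ANDROID_SENSOR_NOISE_PROFILE definition, each color channel
        // is described by a coefficient pair (S, O) modeling the noise standard
        // deviation at signal level x as sqrt(S * x + O); the same pair is
        // replicated for every channel below because S and O are derived only
        // from the analog sensitivity.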
7223 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7224 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7225 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7226 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7227 noise_profile[i] = noise_profile_S;
7228 noise_profile[i+1] = noise_profile_O;
7229 }
7230 LOGD("noise model entry (S, O) is (%f, %f)",
7231 noise_profile_S, noise_profile_O);
7232 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7233 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7234 }
7235
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007237 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007238 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007239 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007240 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007241 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7242 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7243 }
7244 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007245#endif
7246
Thierry Strudel3d639192016-09-09 11:52:26 -07007247 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7248 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7249 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7250 }
7251
7252 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7253 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7254 *faceDetectMode);
7255 if (NAME_NOT_FOUND != val) {
7256 uint8_t fwk_faceDetectMode = (uint8_t)val;
7257 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7258
7259 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7260 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7261 CAM_INTF_META_FACE_DETECTION, metadata) {
7262 uint8_t numFaces = MIN(
7263 faceDetectionInfo->num_faces_detected, MAX_ROI);
7264 int32_t faceIds[MAX_ROI];
7265 uint8_t faceScores[MAX_ROI];
7266 int32_t faceRectangles[MAX_ROI * 4];
7267 int32_t faceLandmarks[MAX_ROI * 6];
7268 size_t j = 0, k = 0;
7269
7270 for (size_t i = 0; i < numFaces; i++) {
7271 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7272                        // Map the face boundary from the sensor output coordinate
7273                        // system to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007274 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007275 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7276 rect.width, rect.height);
7277
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007278 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007279
Jason Lee8ce36fa2017-04-19 19:40:37 -07007280 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7281 "bottom-right (%d, %d)",
7282 faceDetectionInfo->frame_id, i,
7283 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7284 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7285
Thierry Strudel3d639192016-09-09 11:52:26 -07007286 j+= 4;
7287 }
7288 if (numFaces <= 0) {
7289 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7290 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7291 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7292 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7293 }
7294
7295 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7296 numFaces);
7297 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7298 faceRectangles, numFaces * 4U);
7299 if (fwk_faceDetectMode ==
7300 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7301 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7302 CAM_INTF_META_FACE_LANDMARK, metadata) {
7303
7304 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007305 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007306                            // Map the coordinates from the sensor output coordinate
7307                            // system to the active array coordinate system.
7308 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007309 face_landmarks.left_eye_center.x,
7310 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007311 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007312 face_landmarks.right_eye_center.x,
7313 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007314 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007315 face_landmarks.mouth_center.x,
7316 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007317
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007318 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007319
7320 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7321 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7322 faceDetectionInfo->frame_id, i,
7323 faceLandmarks[k + LEFT_EYE_X],
7324 faceLandmarks[k + LEFT_EYE_Y],
7325 faceLandmarks[k + RIGHT_EYE_X],
7326 faceLandmarks[k + RIGHT_EYE_Y],
7327 faceLandmarks[k + MOUTH_X],
7328 faceLandmarks[k + MOUTH_Y]);
7329
Thierry Strudel04e026f2016-10-10 11:27:36 -07007330 k+= TOTAL_LANDMARK_INDICES;
7331 }
7332 } else {
7333 for (size_t i = 0; i < numFaces; i++) {
7334 setInvalidLandmarks(faceLandmarks+k);
7335 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007336 }
7337 }
7338
Jason Lee49619db2017-04-13 12:07:22 -07007339 for (size_t i = 0; i < numFaces; i++) {
7340 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7341
7342 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7343 faceDetectionInfo->frame_id, i, faceIds[i]);
7344 }
7345
Thierry Strudel3d639192016-09-09 11:52:26 -07007346 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7347 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7348 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007349 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007350 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7351 CAM_INTF_META_FACE_BLINK, metadata) {
7352 uint8_t detected[MAX_ROI];
7353 uint8_t degree[MAX_ROI * 2];
7354 for (size_t i = 0; i < numFaces; i++) {
7355 detected[i] = blinks->blink[i].blink_detected;
7356 degree[2 * i] = blinks->blink[i].left_blink;
7357 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007358
Jason Lee49619db2017-04-13 12:07:22 -07007359 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7360 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7361 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7362 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007363 }
7364 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7365 detected, numFaces);
7366 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7367 degree, numFaces * 2);
7368 }
7369 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7370 CAM_INTF_META_FACE_SMILE, metadata) {
7371 uint8_t degree[MAX_ROI];
7372 uint8_t confidence[MAX_ROI];
7373 for (size_t i = 0; i < numFaces; i++) {
7374 degree[i] = smiles->smile[i].smile_degree;
7375 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007376
Jason Lee49619db2017-04-13 12:07:22 -07007377 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7378 "smile_degree=%d, smile_score=%d",
7379 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007380 }
7381 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7382 degree, numFaces);
7383 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7384 confidence, numFaces);
7385 }
7386 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7387 CAM_INTF_META_FACE_GAZE, metadata) {
7388 int8_t angle[MAX_ROI];
7389 int32_t direction[MAX_ROI * 3];
7390 int8_t degree[MAX_ROI * 2];
7391 for (size_t i = 0; i < numFaces; i++) {
7392 angle[i] = gazes->gaze[i].gaze_angle;
7393 direction[3 * i] = gazes->gaze[i].updown_dir;
7394 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7395 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7396 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7397 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007398
7399 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7400 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7401 "left_right_gaze=%d, top_bottom_gaze=%d",
7402 faceDetectionInfo->frame_id, i, angle[i],
7403 direction[3 * i], direction[3 * i + 1],
7404 direction[3 * i + 2],
7405 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007406 }
7407 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7408 (uint8_t *)angle, numFaces);
7409 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7410 direction, numFaces * 3);
7411 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7412 (uint8_t *)degree, numFaces * 2);
7413 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007414 }
7415 }
7416 }
7417 }
7418
7419 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7420 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007422 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007423 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007424
Shuzhen Wang14415f52016-11-16 18:26:18 -08007425 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7426 histogramBins = *histBins;
7427 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7428 }
7429
7430 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007431 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7432 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007433 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007434
7435 switch (stats_data->type) {
7436 case CAM_HISTOGRAM_TYPE_BAYER:
7437 switch (stats_data->bayer_stats.data_type) {
7438 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007439 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7440 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007441 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007442 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7443 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007444 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007445 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7446 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007447 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007448 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007449 case CAM_STATS_CHANNEL_R:
7450 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007451 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7452 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007453 }
7454 break;
7455 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007456 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007457 break;
7458 }
7459
Shuzhen Wang14415f52016-11-16 18:26:18 -08007460 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007461 }
7462 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007463 }
7464
7465 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7466 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7467 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7468 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7469 }
7470
7471 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7472 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7473 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7474 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7475 }
7476
7477 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7478 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7479 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7480 CAM_MAX_SHADING_MAP_HEIGHT);
7481 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7482 CAM_MAX_SHADING_MAP_WIDTH);
7483 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7484 lensShadingMap->lens_shading, 4U * map_width * map_height);
7485 }
7486
7487 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7488 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7489 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7490 }
7491
7492 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7493 //Populate CAM_INTF_META_TONEMAP_CURVES
7494 /* ch0 = G, ch 1 = B, ch 2 = R*/
7495 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7496 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7497 tonemap->tonemap_points_cnt,
7498 CAM_MAX_TONEMAP_CURVE_SIZE);
7499 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7500 }
7501
7502 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7503 &tonemap->curves[0].tonemap_points[0][0],
7504 tonemap->tonemap_points_cnt * 2);
7505
7506 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7507 &tonemap->curves[1].tonemap_points[0][0],
7508 tonemap->tonemap_points_cnt * 2);
7509
7510 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7511 &tonemap->curves[2].tonemap_points[0][0],
7512 tonemap->tonemap_points_cnt * 2);
7513 }
7514
7515 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7516 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7517 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7518 CC_GAIN_MAX);
7519 }
7520
7521 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7522 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7523 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7524 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7525 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7526 }
7527
7528 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7529 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7530 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7531 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7532 toneCurve->tonemap_points_cnt,
7533 CAM_MAX_TONEMAP_CURVE_SIZE);
7534 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7535 }
7536 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7537 (float*)toneCurve->curve.tonemap_points,
7538 toneCurve->tonemap_points_cnt * 2);
7539 }
7540
7541 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7542 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7543 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7544 predColorCorrectionGains->gains, 4);
7545 }
7546
7547 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7548 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7549 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7550 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7551 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7552 }
7553
7554 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7555 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7556 }
7557
7558 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7559 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7560 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7561 }
7562
7563 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7564 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7565 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7566 }
7567
7568 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7569 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7570 *effectMode);
7571 if (NAME_NOT_FOUND != val) {
7572 uint8_t fwk_effectMode = (uint8_t)val;
7573 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7574 }
7575 }
7576
7577 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7578 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7579 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7580 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7581 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7582 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7583 }
7584 int32_t fwk_testPatternData[4];
7585 fwk_testPatternData[0] = testPatternData->r;
7586 fwk_testPatternData[3] = testPatternData->b;
7587 switch (gCamCapability[mCameraId]->color_arrangement) {
7588 case CAM_FILTER_ARRANGEMENT_RGGB:
7589 case CAM_FILTER_ARRANGEMENT_GRBG:
7590 fwk_testPatternData[1] = testPatternData->gr;
7591 fwk_testPatternData[2] = testPatternData->gb;
7592 break;
7593 case CAM_FILTER_ARRANGEMENT_GBRG:
7594 case CAM_FILTER_ARRANGEMENT_BGGR:
7595 fwk_testPatternData[2] = testPatternData->gr;
7596 fwk_testPatternData[1] = testPatternData->gb;
7597 break;
7598 default:
7599 LOGE("color arrangement %d is not supported",
7600 gCamCapability[mCameraId]->color_arrangement);
7601 break;
7602 }
7603 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7604 }
7605
7606 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7607 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7608 }
7609
7610 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7611 String8 str((const char *)gps_methods);
7612 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7613 }
7614
7615 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7616 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7617 }
7618
7619 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7620 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7621 }
7622
7623 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7624 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7625 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7626 }
7627
7628 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7629 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7630 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7631 }
7632
7633 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7634 int32_t fwk_thumb_size[2];
7635 fwk_thumb_size[0] = thumb_size->width;
7636 fwk_thumb_size[1] = thumb_size->height;
7637 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7638 }
7639
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007640 // Skip reprocess metadata if there is no input stream.
7641 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7642 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7643 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7644 privateData,
7645 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007647 }
7648
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007649 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007650 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007651 meteringMode, 1);
7652 }
7653
Thierry Strudel54dc9782017-02-15 12:12:10 -08007654 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7655 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7656 LOGD("hdr_scene_data: %d %f\n",
7657 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7658 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7659 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7660 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7661 &isHdr, 1);
7662 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7663 &isHdrConfidence, 1);
7664 }
7665
7666
7667
Thierry Strudel3d639192016-09-09 11:52:26 -07007668 if (metadata->is_tuning_params_valid) {
7669 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7670 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7671 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7672
7673
7674 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7675 sizeof(uint32_t));
7676 data += sizeof(uint32_t);
7677
7678 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7679 sizeof(uint32_t));
7680 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7681 data += sizeof(uint32_t);
7682
7683 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7684 sizeof(uint32_t));
7685 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7686 data += sizeof(uint32_t);
7687
7688 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7689 sizeof(uint32_t));
7690 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7691 data += sizeof(uint32_t);
7692
7693 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7694 sizeof(uint32_t));
7695 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7696 data += sizeof(uint32_t);
7697
7698 metadata->tuning_params.tuning_mod3_data_size = 0;
7699 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7700 sizeof(uint32_t));
7701 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7702 data += sizeof(uint32_t);
7703
7704 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7705 TUNING_SENSOR_DATA_MAX);
7706 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7707 count);
7708 data += count;
7709
7710 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7711 TUNING_VFE_DATA_MAX);
7712 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7713 count);
7714 data += count;
7715
7716 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7717 TUNING_CPP_DATA_MAX);
7718 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7719 count);
7720 data += count;
7721
7722 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7723 TUNING_CAC_DATA_MAX);
7724 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7725 count);
7726 data += count;
7727
7728 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7729 (int32_t *)(void *)tuning_meta_data_blob,
7730 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7731 }
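    // Blob layout as assembled above (sketch of what the memcpy sequence
    // produces): six uint32_t header words in order
    //   [tuning_data_version, sensor_size, vfe_size, cpp_size, cac_size,
    //    mod3_size (forced to 0)]
    // followed by the sensor, VFE, CPP and CAC payloads, each clamped to its
    // TUNING_*_DATA_MAX bound. The final update() publishes the blob as
    // uint32_t words, hence the (data - tuning_meta_data_blob) / sizeof(uint32_t)
    // element count.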
7732
7733 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7734 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7735 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7736 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7737 NEUTRAL_COL_POINTS);
7738 }
7739
7740 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7741 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7742 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7743 }
7744
7745 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7746 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7747 // Adjust the AE region from sensor output coordinate system to active
7748 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007749 cam_rect_t hAeRect = hAeRegions->rect;
7750 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7751 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007752
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007753 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007754 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7755 REGIONS_TUPLE_COUNT);
7756 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7757 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007758 hAeRect.left, hAeRect.top, hAeRect.width,
7759 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007760 }
7761
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007762 if (!pendingRequest.focusStateSent) {
7763 if (pendingRequest.focusStateValid) {
7764 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7765 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007766 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007767 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7768 uint8_t fwk_afState = (uint8_t) *afState;
7769 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7770 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7771 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007772 }
7773 }
7774
Thierry Strudel3d639192016-09-09 11:52:26 -07007775 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7776 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7777 }
7778
7779 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7780 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7781 }
7782
7783 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7784 uint8_t fwk_lensState = *lensState;
7785 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7786 }
7787
Thierry Strudel3d639192016-09-09 11:52:26 -07007788 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007789 uint32_t ab_mode = *hal_ab_mode;
7790 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7791 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7792 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7793 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007794 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007795 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007796 if (NAME_NOT_FOUND != val) {
7797 uint8_t fwk_ab_mode = (uint8_t)val;
7798 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7799 }
7800 }
7801
7802 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7803 int val = lookupFwkName(SCENE_MODES_MAP,
7804 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7805 if (NAME_NOT_FOUND != val) {
7806 uint8_t fwkBestshotMode = (uint8_t)val;
7807 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7808 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7809 } else {
7810 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7811 }
7812 }
7813
7814 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7815 uint8_t fwk_mode = (uint8_t) *mode;
7816 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7817 }
7818
7819 /* Constant metadata values to be updated */
7820 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7821 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7822
7823 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7824 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7825
7826 int32_t hotPixelMap[2];
7827 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7828
7829 // CDS
7830 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7831 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7832 }
7833
Thierry Strudel04e026f2016-10-10 11:27:36 -07007834 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7835 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007836 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007837 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7838 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7839 } else {
7840 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7841 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007842
7843 if(fwk_hdr != curr_hdr_state) {
7844 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7845 if(fwk_hdr)
7846 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7847 else
7848 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7849 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007850 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7851 }
7852
Thierry Strudel54dc9782017-02-15 12:12:10 -08007853 //binning correction
7854 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7855 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7856 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7857 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7858 }
7859
Thierry Strudel04e026f2016-10-10 11:27:36 -07007860 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007861 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007862 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7863 int8_t is_ir_on = 0;
7864
7865 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7866 if(is_ir_on != curr_ir_state) {
7867 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7868 if(is_ir_on)
7869 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7870 else
7871 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7872 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007873 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007874 }
7875
Thierry Strudel269c81a2016-10-12 12:13:59 -07007876 // AEC SPEED
7877 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7878 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7879 }
7880
7881 // AWB SPEED
7882 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7883 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7884 }
7885
Thierry Strudel3d639192016-09-09 11:52:26 -07007886 // TNR
7887 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7888 uint8_t tnr_enable = tnr->denoise_enable;
7889 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007890 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7891 int8_t is_tnr_on = 0;
7892
7893 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7894 if(is_tnr_on != curr_tnr_state) {
7895 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7896 if(is_tnr_on)
7897 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7898 else
7899 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7900 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007901
7902 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7903 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7904 }
7905
7906 // Reprocess crop data
7907 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7908 uint8_t cnt = crop_data->num_of_streams;
7909 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7910 // mm-qcamera-daemon only posts crop_data for streams
7911 // not linked to pproc, so the absence of valid crop metadata
7912 // is not necessarily an error case.
7913 LOGD("No valid crop metadata entries");
7914 } else {
7915 uint32_t reproc_stream_id;
7916 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7917 LOGD("No reprocessible stream found, ignore crop data");
7918 } else {
7919 int rc = NO_ERROR;
7920 Vector<int32_t> roi_map;
7921 int32_t *crop = new int32_t[cnt*4];
7922 if (NULL == crop) {
7923 rc = NO_MEMORY;
7924 }
7925 if (NO_ERROR == rc) {
7926 int32_t streams_found = 0;
7927 for (size_t i = 0; i < cnt; i++) {
7928 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7929 if (pprocDone) {
7930 // HAL already does internal reprocessing,
7931 // either via reprocessing before JPEG encoding,
7932 // or offline postprocessing for pproc bypass case.
7933 crop[0] = 0;
7934 crop[1] = 0;
7935 crop[2] = mInputStreamInfo.dim.width;
7936 crop[3] = mInputStreamInfo.dim.height;
7937 } else {
7938 crop[0] = crop_data->crop_info[i].crop.left;
7939 crop[1] = crop_data->crop_info[i].crop.top;
7940 crop[2] = crop_data->crop_info[i].crop.width;
7941 crop[3] = crop_data->crop_info[i].crop.height;
7942 }
7943 roi_map.add(crop_data->crop_info[i].roi_map.left);
7944 roi_map.add(crop_data->crop_info[i].roi_map.top);
7945 roi_map.add(crop_data->crop_info[i].roi_map.width);
7946 roi_map.add(crop_data->crop_info[i].roi_map.height);
7947 streams_found++;
7948 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7949 crop[0], crop[1], crop[2], crop[3]);
7950 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7951 crop_data->crop_info[i].roi_map.left,
7952 crop_data->crop_info[i].roi_map.top,
7953 crop_data->crop_info[i].roi_map.width,
7954 crop_data->crop_info[i].roi_map.height);
7955 break;
7956
7957 }
7958 }
7959 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7960 &streams_found, 1);
7961 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7962 crop, (size_t)(streams_found * 4));
7963 if (roi_map.array()) {
7964 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7965 roi_map.array(), roi_map.size());
7966 }
7967 }
7968 if (crop) {
7969 delete [] crop;
7970 }
7971 }
7972 }
7973 }
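    // Illustrative note: QCAMERA3_CROP_REPROCESS carries [left, top, width,
    // height] for the matched reprocess stream (streams_found is capped at 1
    // by the break above), and QCAMERA3_CROP_ROI_MAP_REPROCESS carries the
    // corresponding ROI map rectangle in the same order.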
7974
7975 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7976 // Regardless of whether CAC is supported, CTS expects the CAC result to be
7977 // non-NULL, so hardcode the CAC result to OFF mode.
7978 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7979 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7980 } else {
7981 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7982 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7983 *cacMode);
7984 if (NAME_NOT_FOUND != val) {
7985 uint8_t resultCacMode = (uint8_t)val;
7986 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7987 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007988 if (pendingRequest.fwkCacMode != resultCacMode) {
7989 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007990 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007991 //Check if CAC is disabled by property
7992 if (m_cacModeDisabled) {
7993 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7994 }
7995
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007996 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007997 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7998 } else {
7999 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8000 }
8001 }
8002 }
8003
8004 // Post blob of cam_cds_data through vendor tag.
8005 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8006 uint8_t cnt = cdsInfo->num_of_streams;
8007 cam_cds_data_t cdsDataOverride;
8008 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8009 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8010 cdsDataOverride.num_of_streams = 1;
8011 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8012 uint32_t reproc_stream_id;
8013 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8014 LOGD("No reprocessible stream found, ignore cds data");
8015 } else {
8016 for (size_t i = 0; i < cnt; i++) {
8017 if (cdsInfo->cds_info[i].stream_id ==
8018 reproc_stream_id) {
8019 cdsDataOverride.cds_info[0].cds_enable =
8020 cdsInfo->cds_info[i].cds_enable;
8021 break;
8022 }
8023 }
8024 }
8025 } else {
8026 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8027 }
8028 camMetadata.update(QCAMERA3_CDS_INFO,
8029 (uint8_t *)&cdsDataOverride,
8030 sizeof(cam_cds_data_t));
8031 }
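    // Note: the override blob always advertises exactly one stream
    // (num_of_streams = 1) and carries only the matched reprocess stream's
    // cds_enable, together with the session-wide session_cds_enable flag.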
8032
8033 // Ldaf calibration data
8034 if (!mLdafCalibExist) {
8035 IF_META_AVAILABLE(uint32_t, ldafCalib,
8036 CAM_INTF_META_LDAF_EXIF, metadata) {
8037 mLdafCalibExist = true;
8038 mLdafCalib[0] = ldafCalib[0];
8039 mLdafCalib[1] = ldafCalib[1];
8040 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8041 ldafCalib[0], ldafCalib[1]);
8042 }
8043 }
8044
Thierry Strudel54dc9782017-02-15 12:12:10 -08008045 // EXIF debug data through vendor tag
8046 /*
8047 * Mobicat Mask can assume 3 values:
8048 * 1 refers to Mobicat data,
8049 * 2 refers to Stats Debug and Exif Debug Data
8050 * 3 refers to Mobicat and Stats Debug Data
8051 * We want to make sure that we are sending Exif debug data
8052 * only when Mobicat Mask is 2.
8053 */
8054 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8055 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8056 (uint8_t *)(void *)mExifParams.debug_params,
8057 sizeof(mm_jpeg_debug_exif_params_t));
8058 }
8059
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008060 // Reprocess and DDM debug data through vendor tag
8061 cam_reprocess_info_t repro_info;
8062 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008063 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8064 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008065 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008066 }
8067 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8068 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008069 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008070 }
8071 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8072 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008073 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008074 }
8075 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8076 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008077 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008078 }
8079 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8080 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008081 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008082 }
8083 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008084 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008085 }
8086 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8087 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008088 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008089 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008090 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8091 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8092 }
8093 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8094 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8095 }
8096 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8097 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008098
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008099 // INSTANT AEC MODE
8100 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8101 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8102 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8103 }
8104
Shuzhen Wange763e802016-03-31 10:24:29 -07008105 // AF scene change
8106 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8107 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8108 }
8109
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008110 // Enable ZSL
8111 if (enableZsl != nullptr) {
8112 uint8_t value = *enableZsl ?
8113 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8114 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8115 }
8116
Xu Han821ea9c2017-05-23 09:00:40 -07008117 // OIS Data
8118 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8119 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8120 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8121 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8122 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8123 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8124 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8125 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8126 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8127 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8128 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008129 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8130 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8131 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8132 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008133 }
8134
Thierry Strudel3d639192016-09-09 11:52:26 -07008135 resultMetadata = camMetadata.release();
8136 return resultMetadata;
8137}
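// Ownership note (assumption about the caller, not stated here):
// camMetadata.release() hands the raw camera_metadata_t buffer to the caller,
// which is expected to attach it to the capture result and eventually free it
// with free_camera_metadata().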
8138
8139/*===========================================================================
8140 * FUNCTION : saveExifParams
8141 *
8142 * DESCRIPTION: Cache 3A and stats EXIF debug parameters from the metadata callback
8143 *
8144 * PARAMETERS :
8145 * @metadata : metadata information from callback
8146 *
8147 * RETURN : none
8148 *
8149 *==========================================================================*/
8150void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8151{
8152 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8153 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8154 if (mExifParams.debug_params) {
8155 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8156 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8157 }
8158 }
8159 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8160 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8161 if (mExifParams.debug_params) {
8162 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8163 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8164 }
8165 }
8166 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8167 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8168 if (mExifParams.debug_params) {
8169 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8170 mExifParams.debug_params->af_debug_params_valid = TRUE;
8171 }
8172 }
8173 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8174 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8175 if (mExifParams.debug_params) {
8176 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8177 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8178 }
8179 }
8180 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8181 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8182 if (mExifParams.debug_params) {
8183 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8184 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8185 }
8186 }
8187 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8188 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8189 if (mExifParams.debug_params) {
8190 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8191 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8192 }
8193 }
8194 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8195 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8196 if (mExifParams.debug_params) {
8197 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8198 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8199 }
8200 }
8201 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8202 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8203 if (mExifParams.debug_params) {
8204 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8205 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8206 }
8207 }
8208}
8209
8210/*===========================================================================
8211 * FUNCTION : get3AExifParams
8212 *
8213 * DESCRIPTION: Return the cached EXIF parameters collected from 3A metadata
8214 *
8215 * PARAMETERS : none
8216 *
8217 *
8218 * RETURN : mm_jpeg_exif_params_t
8219 *
8220 *==========================================================================*/
8221mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8222{
8223 return mExifParams;
8224}
8225
8226/*===========================================================================
8227 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8228 *
8229 * DESCRIPTION: Translate urgent (partial) metadata from the HAL callback into framework result metadata
8230 *
8231 * PARAMETERS :
8232 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008233 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8234 * urgent metadata in a batch. Always true for
8235 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008236 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008237 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8238 * i.e. even though it doesn't map to a valid partial
8239 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008240 * RETURN : camera_metadata_t*
8241 * metadata in a format specified by fwk
8242 *==========================================================================*/
8243camera_metadata_t*
8244QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008245 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008246 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008247{
8248 CameraMetadata camMetadata;
8249 camera_metadata_t *resultMetadata;
8250
Shuzhen Wang485e2442017-08-02 12:21:08 -07008251 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008252 /* In batch mode, use empty metadata if this is not the last in batch
8253 */
8254 resultMetadata = allocate_camera_metadata(0, 0);
8255 return resultMetadata;
8256 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008257
8258 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8259 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8260 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8261 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8262 }
8263
8264 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8265 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8266 &aecTrigger->trigger, 1);
8267 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8268 &aecTrigger->trigger_id, 1);
8269 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8270 aecTrigger->trigger);
8271 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8272 aecTrigger->trigger_id);
8273 }
8274
8275 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8276 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8277 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8278 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8279 }
8280
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008281 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8282 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8283 if (NAME_NOT_FOUND != val) {
8284 uint8_t fwkAfMode = (uint8_t)val;
8285 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8286 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8287 } else {
8288 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8289 val);
8290 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008291 }
8292
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008293 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8294 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8295 af_trigger->trigger);
8296 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8297 af_trigger->trigger_id);
8298
8299 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8300 mAfTrigger = *af_trigger;
8301 uint32_t fwk_AfState = (uint32_t) *afState;
8302
8303 // If this is the result for a new trigger, check if there is new early
8304 // af state. If there is, use the last af state for all results
8305 // preceding current partial frame number.
8306 for (auto & pendingRequest : mPendingRequestsList) {
8307 if (pendingRequest.frame_number < frame_number) {
8308 pendingRequest.focusStateValid = true;
8309 pendingRequest.focusState = fwk_AfState;
8310 } else if (pendingRequest.frame_number == frame_number) {
8311 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8312 // Check if early AF state for trigger exists. If yes, send AF state as
8313 // partial result for better latency.
8314 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8315 pendingRequest.focusStateSent = true;
8316 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8317 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8318 frame_number, fwkEarlyAfState);
8319 }
8320 }
8321 }
8322 }
8323 }
8324 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8325 &mAfTrigger.trigger, 1);
8326 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8327
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008328 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8329 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008330 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008331 int32_t afRegions[REGIONS_TUPLE_COUNT];
8332 // Adjust the AF region from sensor output coordinate system to active
8333 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008334 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8335 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008336
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008337 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008338 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8339 REGIONS_TUPLE_COUNT);
8340 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8341 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008342 hAfRect.left, hAfRect.top, hAfRect.width,
8343 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008344 }
8345
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008346 // AF region confidence
8347 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8348 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8349 }
8350
Thierry Strudel3d639192016-09-09 11:52:26 -07008351 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8352 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8353 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8354 if (NAME_NOT_FOUND != val) {
8355 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8356 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8357 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8358 } else {
8359 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8360 }
8361 }
8362
8363 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8364 uint32_t aeMode = CAM_AE_MODE_MAX;
8365 int32_t flashMode = CAM_FLASH_MODE_MAX;
8366 int32_t redeye = -1;
8367 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8368 aeMode = *pAeMode;
8369 }
8370 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8371 flashMode = *pFlashMode;
8372 }
8373 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8374 redeye = *pRedeye;
8375 }
8376
8377 if (1 == redeye) {
8378 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8379 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8380 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8381 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8382 flashMode);
8383 if (NAME_NOT_FOUND != val) {
8384 fwk_aeMode = (uint8_t)val;
8385 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8386 } else {
8387 LOGE("Unsupported flash mode %d", flashMode);
8388 }
8389 } else if (aeMode == CAM_AE_MODE_ON) {
8390 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8391 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8392 } else if (aeMode == CAM_AE_MODE_OFF) {
8393 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8394 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008395 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8396 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8397 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008398 } else {
8399 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8400 "flashMode:%d, aeMode:%u!!!",
8401 redeye, flashMode, aeMode);
8402 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008403 if (mInstantAEC) {
8404 // Increment the frame index count until a bound is reached for instant AEC.
8405 mInstantAecFrameIdxCount++;
8406 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8407 CAM_INTF_META_AEC_INFO, metadata) {
8408 LOGH("ae_params->settled = %d",ae_params->settled);
8409 // If AEC has settled, or the number of frames has reached the bound,
8410 // reset instant AEC.
8411 if (ae_params->settled ||
8412 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8413 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8414 mInstantAEC = false;
8415 mResetInstantAEC = true;
8416 mInstantAecFrameIdxCount = 0;
8417 }
8418 }
8419 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008420 resultMetadata = camMetadata.release();
8421 return resultMetadata;
8422}
8423
8424/*===========================================================================
8425 * FUNCTION : dumpMetadataToFile
8426 *
8427 * DESCRIPTION: Dumps tuning metadata to file system
8428 *
8429 * PARAMETERS :
8430 * @meta : tuning metadata
8431 * @dumpFrameCount : current dump frame count
8432 * @enabled : whether dumping is enabled
8433 *
8434 *==========================================================================*/
8435void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8436 uint32_t &dumpFrameCount,
8437 bool enabled,
8438 const char *type,
8439 uint32_t frameNumber)
8440{
8441 //Some sanity checks
8442 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8443 LOGE("Tuning sensor data size bigger than expected %d: %d",
8444 meta.tuning_sensor_data_size,
8445 TUNING_SENSOR_DATA_MAX);
8446 return;
8447 }
8448
8449 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8450 LOGE("Tuning VFE data size bigger than expected %d: %d",
8451 meta.tuning_vfe_data_size,
8452 TUNING_VFE_DATA_MAX);
8453 return;
8454 }
8455
8456 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8457 LOGE("Tuning CPP data size bigger than expected %d: %d",
8458 meta.tuning_cpp_data_size,
8459 TUNING_CPP_DATA_MAX);
8460 return;
8461 }
8462
8463 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8464 LOGE("Tuning CAC data size bigger than expected %d: %d",
8465 meta.tuning_cac_data_size,
8466 TUNING_CAC_DATA_MAX);
8467 return;
8468 }
8469 //
8470
8471 if(enabled){
8472 char timeBuf[FILENAME_MAX];
8473 char buf[FILENAME_MAX];
8474 memset(buf, 0, sizeof(buf));
8475 memset(timeBuf, 0, sizeof(timeBuf));
8476 time_t current_time;
8477 struct tm * timeinfo;
8478 time (&current_time);
8479 timeinfo = localtime (&current_time);
8480 if (timeinfo != NULL) {
8481 strftime (timeBuf, sizeof(timeBuf),
8482 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8483 }
8484 String8 filePath(timeBuf);
8485 snprintf(buf,
8486 sizeof(buf),
8487 "%dm_%s_%d.bin",
8488 dumpFrameCount,
8489 type,
8490 frameNumber);
8491 filePath.append(buf);
8492 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8493 if (file_fd >= 0) {
8494 ssize_t written_len = 0;
8495 meta.tuning_data_version = TUNING_DATA_VERSION;
8496 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8497 written_len += write(file_fd, data, sizeof(uint32_t));
8498 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8499 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8500 written_len += write(file_fd, data, sizeof(uint32_t));
8501 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8502 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8503 written_len += write(file_fd, data, sizeof(uint32_t));
8504 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8505 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8506 written_len += write(file_fd, data, sizeof(uint32_t));
8507 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8508 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8509 written_len += write(file_fd, data, sizeof(uint32_t));
8510 meta.tuning_mod3_data_size = 0;
8511 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8512 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8513 written_len += write(file_fd, data, sizeof(uint32_t));
8514 size_t total_size = meta.tuning_sensor_data_size;
8515 data = (void *)((uint8_t *)&meta.data);
8516 written_len += write(file_fd, data, total_size);
8517 total_size = meta.tuning_vfe_data_size;
8518 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8519 written_len += write(file_fd, data, total_size);
8520 total_size = meta.tuning_cpp_data_size;
8521 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8522 written_len += write(file_fd, data, total_size);
8523 total_size = meta.tuning_cac_data_size;
8524 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8525 written_len += write(file_fd, data, total_size);
8526 close(file_fd);
8527 } else {
8528 LOGE("fail to open file for metadata dumping");
8529 }
8530 }
8531}
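// Example of the dump path built above (illustrative values only): the file
// name is QCAMERA_DUMP_FRM_LOCATION + "YYYYmmddHHMMSS" immediately followed by
// "<dumpFrameCount>m_<type>_<frameNumber>.bin", and the file body repeats the
// six-word size header plus the four payload sections written with the
// write() calls above.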
8532
8533/*===========================================================================
8534 * FUNCTION : cleanAndSortStreamInfo
8535 *
8536 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8537 * and sort them such that raw streams are at the end of the list.
8538 * This is a workaround for a camera daemon constraint.
8539 *
8540 * PARAMETERS : None
8541 *
8542 *==========================================================================*/
8543void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8544{
8545 List<stream_info_t *> newStreamInfo;
8546
8547 /*clean up invalid streams*/
8548 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8549 it != mStreamInfo.end();) {
8550 if(((*it)->status) == INVALID){
8551 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8552 delete channel;
8553 free(*it);
8554 it = mStreamInfo.erase(it);
8555 } else {
8556 it++;
8557 }
8558 }
8559
8560 // Move preview/video/callback/snapshot streams into newList
8561 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8562 it != mStreamInfo.end();) {
8563 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8564 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8565 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8566 newStreamInfo.push_back(*it);
8567 it = mStreamInfo.erase(it);
8568 } else
8569 it++;
8570 }
8571 // Move raw streams into newList
8572 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8573 it != mStreamInfo.end();) {
8574 newStreamInfo.push_back(*it);
8575 it = mStreamInfo.erase(it);
8576 }
8577
8578 mStreamInfo = newStreamInfo;
8579}
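// Illustrative result of the sort above (assumed stream set): an input order
// of {RAW16, preview, snapshot} ends up as {preview, snapshot, RAW16}, i.e.
// all non-RAW streams keep their relative order and RAW streams move to the
// end of mStreamInfo.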
8580
8581/*===========================================================================
8582 * FUNCTION : extractJpegMetadata
8583 *
8584 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8585 * JPEG metadata is cached in HAL, and return as part of capture
8586 * result when metadata is returned from camera daemon.
8587 *
8588 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8589 * @request: capture request
8590 *
8591 *==========================================================================*/
8592void QCamera3HardwareInterface::extractJpegMetadata(
8593 CameraMetadata& jpegMetadata,
8594 const camera3_capture_request_t *request)
8595{
8596 CameraMetadata frame_settings;
8597 frame_settings = request->settings;
8598
8599 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8600 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8601 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8602 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8603
8604 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8605 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8606 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8607 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8608
8609 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8610 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8611 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8612 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8613
8614 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8615 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8616 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8617 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8618
8619 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8620 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8621 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8622 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8623
8624 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8625 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8626 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8627 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8628
8629 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8630 int32_t thumbnail_size[2];
8631 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8632 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8633 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8634 int32_t orientation =
8635 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008636 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008637 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8638 int32_t temp;
8639 temp = thumbnail_size[0];
8640 thumbnail_size[0] = thumbnail_size[1];
8641 thumbnail_size[1] = temp;
8642 }
8643 }
8644 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8645 thumbnail_size,
8646 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8647 }
8648
8649}
8650
8651/*===========================================================================
8652 * FUNCTION : convertToRegions
8653 *
8654 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8655 *
8656 * PARAMETERS :
8657 * @rect : cam_rect_t struct to convert
8658 * @region : int32_t destination array
8659 * @weight : if we are converting from cam_area_t, weight is valid
8660 * else weight = -1
8661 *
8662 *==========================================================================*/
8663void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8664 int32_t *region, int weight)
8665{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008666 region[FACE_LEFT] = rect.left;
8667 region[FACE_TOP] = rect.top;
8668 region[FACE_RIGHT] = rect.left + rect.width;
8669 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008670 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008671 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008672 }
8673}
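// Illustrative example (assumed values, and assuming FACE_LEFT..FACE_WEIGHT
// index slots 0..4): a cam_rect_t of {left=100, top=200, width=300,
// height=400} with weight 1 becomes region[] = {100, 200, 400, 600, 1},
// i.e. (xmin, ymin, xmax, ymax, weight).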
8674
8675/*===========================================================================
8676 * FUNCTION : convertFromRegions
8677 *
8678 * DESCRIPTION: helper method to convert a framework region array
8679 *              [xmin, ymin, xmax, ymax, weight] into cam_area_t
8680 *
8681 * PARAMETERS :
8682 * @roi : cam_area_t destination struct
8683 * @frame_settings : request settings holding the region tag
8684 * @tag : metadata tag of the 5-element region array
8685 *
8686 *==========================================================================*/
8687void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008688 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008689{
Thierry Strudel3d639192016-09-09 11:52:26 -07008690 int32_t x_min = frame_settings.find(tag).data.i32[0];
8691 int32_t y_min = frame_settings.find(tag).data.i32[1];
8692 int32_t x_max = frame_settings.find(tag).data.i32[2];
8693 int32_t y_max = frame_settings.find(tag).data.i32[3];
8694 roi.weight = frame_settings.find(tag).data.i32[4];
8695 roi.rect.left = x_min;
8696 roi.rect.top = y_min;
8697 roi.rect.width = x_max - x_min;
8698 roi.rect.height = y_max - y_min;
8699}
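// Illustrative inverse of the above (assumed values): a framework region of
// {100, 200, 400, 600, 1} read from frame_settings yields
// roi.rect = {left=100, top=200, width=300, height=400} and roi.weight = 1.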
8700
8701/*===========================================================================
8702 * FUNCTION : resetIfNeededROI
8703 *
8704 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8705 * crop region
8706 *
8707 * PARAMETERS :
8708 * @roi : cam_area_t struct to resize
8709 * @scalerCropRegion : cam_crop_region_t region to compare against
8710 *
8711 *
8712 *==========================================================================*/
8713bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8714 const cam_crop_region_t* scalerCropRegion)
8715{
8716 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8717 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8718 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8719 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8720
8721 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8722 * Without this check, the validation below (whether the roi lies inside the
8723 * scaler crop region) would fail, the roi would not be reset, and the
8724 * algorithm would keep using a stale roi window.
8725 */
8726 if (roi->weight == 0) {
8727 return true;
8728 }
8729
8730 if ((roi_x_max < scalerCropRegion->left) ||
8731 // right edge of roi window is left of scaler crop's left edge
8732 (roi_y_max < scalerCropRegion->top) ||
8733 // bottom edge of roi window is above scaler crop's top edge
8734 (roi->rect.left > crop_x_max) ||
8735 // left edge of roi window is right of scaler crop's right edge
8736 (roi->rect.top > crop_y_max)){
8737 // top edge of roi window is below scaler crop's bottom edge
8738 return false;
8739 }
8740 if (roi->rect.left < scalerCropRegion->left) {
8741 roi->rect.left = scalerCropRegion->left;
8742 }
8743 if (roi->rect.top < scalerCropRegion->top) {
8744 roi->rect.top = scalerCropRegion->top;
8745 }
8746 if (roi_x_max > crop_x_max) {
8747 roi_x_max = crop_x_max;
8748 }
8749 if (roi_y_max > crop_y_max) {
8750 roi_y_max = crop_y_max;
8751 }
8752 roi->rect.width = roi_x_max - roi->rect.left;
8753 roi->rect.height = roi_y_max - roi->rect.top;
8754 return true;
8755}
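// Illustrative example (assumed values): with a scaler crop region of
// {left=0, top=0, width=1000, height=750}, an ROI of {left=900, top=700,
// width=300, height=200} is clamped to {left=900, top=700, width=100,
// height=50} and the function returns true; an ROI lying entirely outside the
// crop returns false, and a zero-weight ROI returns true without modification.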
8756
8757/*===========================================================================
8758 * FUNCTION : convertLandmarks
8759 *
8760 * DESCRIPTION: helper method to extract the landmarks from face detection info
8761 *
8762 * PARAMETERS :
8763 * @landmark_data : input landmark data to be converted
8764 * @landmarks : int32_t destination array
8765 *
8766 *
8767 *==========================================================================*/
8768void QCamera3HardwareInterface::convertLandmarks(
8769 cam_face_landmarks_info_t landmark_data,
8770 int32_t *landmarks)
8771{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008772 if (landmark_data.is_left_eye_valid) {
8773 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8774 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8775 } else {
8776 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8777 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8778 }
8779
8780 if (landmark_data.is_right_eye_valid) {
8781 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8782 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8783 } else {
8784 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8785 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8786 }
8787
8788 if (landmark_data.is_mouth_valid) {
8789 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8790 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8791 } else {
8792 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8793 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8794 }
8795}
8796
8797/*===========================================================================
8798 * FUNCTION : setInvalidLandmarks
8799 *
8800 * DESCRIPTION: helper method to set invalid landmarks
8801 *
8802 * PARAMETERS :
8803 * @landmarks : int32_t destination array
8804 *
8805 *
8806 *==========================================================================*/
8807void QCamera3HardwareInterface::setInvalidLandmarks(
8808 int32_t *landmarks)
8809{
8810 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8811 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8812 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8813 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8814 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8815 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008816}
8817
8818#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008819
8820/*===========================================================================
8821 * FUNCTION : getCapabilities
8822 *
8823 * DESCRIPTION: query camera capability from back-end
8824 *
8825 * PARAMETERS :
8826 * @ops : mm-interface ops structure
8827 * @cam_handle : camera handle for which we need capability
8828 *
8829 * RETURN : ptr type of capability structure
8830 * capability for success
8831 * NULL for failure
8832 *==========================================================================*/
8833cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8834 uint32_t cam_handle)
8835{
8836 int rc = NO_ERROR;
8837 QCamera3HeapMemory *capabilityHeap = NULL;
8838 cam_capability_t *cap_ptr = NULL;
8839
8840 if (ops == NULL) {
8841 LOGE("Invalid arguments");
8842 return NULL;
8843 }
8844
8845 capabilityHeap = new QCamera3HeapMemory(1);
8846 if (capabilityHeap == NULL) {
8847 LOGE("creation of capabilityHeap failed");
8848 return NULL;
8849 }
8850
8851 /* Allocate memory for capability buffer */
8852 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8853 if(rc != OK) {
8854 LOGE("No memory for cappability");
8855 goto allocate_failed;
8856 }
8857
8858 /* Map memory for capability buffer */
8859 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8860
8861 rc = ops->map_buf(cam_handle,
8862 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8863 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8864 if(rc < 0) {
8865 LOGE("failed to map capability buffer");
8866 rc = FAILED_TRANSACTION;
8867 goto map_failed;
8868 }
8869
8870 /* Query Capability */
8871 rc = ops->query_capability(cam_handle);
8872 if(rc < 0) {
8873 LOGE("failed to query capability");
8874 rc = FAILED_TRANSACTION;
8875 goto query_failed;
8876 }
8877
8878 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8879 if (cap_ptr == NULL) {
8880 LOGE("out of memory");
8881 rc = NO_MEMORY;
8882 goto query_failed;
8883 }
8884
8885 memset(cap_ptr, 0, sizeof(cam_capability_t));
8886 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8887
8888 int index;
8889 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8890 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8891 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8892 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8893 }
8894
8895query_failed:
8896 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8897map_failed:
8898 capabilityHeap->deallocate();
8899allocate_failed:
8900 delete capabilityHeap;
8901
8902 if (rc != NO_ERROR) {
8903 return NULL;
8904 } else {
8905 return cap_ptr;
8906 }
8907}
8908
Thierry Strudel3d639192016-09-09 11:52:26 -07008909/*===========================================================================
8910 * FUNCTION : initCapabilities
8911 *
8912 * DESCRIPTION: initialize camera capabilities in static data struct
8913 *
8914 * PARAMETERS :
8915 * @cameraId : camera Id
8916 *
8917 * RETURN : int32_t type of status
8918 * NO_ERROR -- success
8919 * none-zero failure code
8920 *==========================================================================*/
8921int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8922{
8923 int rc = 0;
8924 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008925 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008926
8927 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8928 if (rc) {
8929 LOGE("camera_open failed. rc = %d", rc);
8930 goto open_failed;
8931 }
8932 if (!cameraHandle) {
8933 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8934 goto open_failed;
8935 }
8936
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008937 handle = get_main_camera_handle(cameraHandle->camera_handle);
8938 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8939 if (gCamCapability[cameraId] == NULL) {
8940 rc = FAILED_TRANSACTION;
8941 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008942 }
8943
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008944 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008945 if (is_dual_camera_by_idx(cameraId)) {
8946 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8947 gCamCapability[cameraId]->aux_cam_cap =
8948 getCapabilities(cameraHandle->ops, handle);
8949 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8950 rc = FAILED_TRANSACTION;
8951 free(gCamCapability[cameraId]);
8952 goto failed_op;
8953 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008954
8955 // Copy the main camera capability to main_cam_cap struct
8956 gCamCapability[cameraId]->main_cam_cap =
8957 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8958 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8959 LOGE("out of memory");
8960 rc = NO_MEMORY;
8961 goto failed_op;
8962 }
8963 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8964 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008965 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008966failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008967 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8968 cameraHandle = NULL;
8969open_failed:
8970 return rc;
8971}
8972
8973/*==========================================================================
8974 * FUNCTION : get3Aversion
8975 *
8976 * DESCRIPTION: get the Q3A S/W version
8977 *
8978 * PARAMETERS :
8979 * @sw_version: Reference of Q3A structure which will hold version info upon
8980 * return
8981 *
8982 * RETURN : None
8983 *
8984 *==========================================================================*/
8985void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8986{
8987 if(gCamCapability[mCameraId])
8988 sw_version = gCamCapability[mCameraId]->q3a_version;
8989 else
8990 LOGE("Capability structure NULL!");
8991}
8992
8993
8994/*===========================================================================
8995 * FUNCTION : initParameters
8996 *
8997 * DESCRIPTION: initialize camera parameters
8998 *
8999 * PARAMETERS :
9000 *
9001 * RETURN : int32_t type of status
9002 * NO_ERROR -- success
9003 * non-zero failure code
9004 *==========================================================================*/
9005int QCamera3HardwareInterface::initParameters()
9006{
9007 int rc = 0;
9008
9009 //Allocate Set Param Buffer
9010 mParamHeap = new QCamera3HeapMemory(1);
9011 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9012 if(rc != OK) {
9013 rc = NO_MEMORY;
9014 LOGE("Failed to allocate SETPARM Heap memory");
9015 delete mParamHeap;
9016 mParamHeap = NULL;
9017 return rc;
9018 }
9019
9020 //Map memory for parameters buffer
9021 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9022 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9023 mParamHeap->getFd(0),
9024 sizeof(metadata_buffer_t),
9025 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9026 if(rc < 0) {
9027 LOGE("failed to map SETPARM buffer");
9028 rc = FAILED_TRANSACTION;
9029 mParamHeap->deallocate();
9030 delete mParamHeap;
9031 mParamHeap = NULL;
9032 return rc;
9033 }
9034
9035 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9036
9037 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9038 return rc;
9039}
9040
9041/*===========================================================================
9042 * FUNCTION : deinitParameters
9043 *
9044 * DESCRIPTION: de-initialize camera parameters
9045 *
9046 * PARAMETERS :
9047 *
9048 * RETURN : NONE
9049 *==========================================================================*/
9050void QCamera3HardwareInterface::deinitParameters()
9051{
9052 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9053 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9054
9055 mParamHeap->deallocate();
9056 delete mParamHeap;
9057 mParamHeap = NULL;
9058
9059 mParameters = NULL;
9060
9061 free(mPrevParameters);
9062 mPrevParameters = NULL;
9063}
9064
9065/*===========================================================================
9066 * FUNCTION : calcMaxJpegSize
9067 *
9068 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9069 *
9070 * PARAMETERS :
9071 * @camera_id : camera Id
9072 * RETURN : max_jpeg_size
9073 *==========================================================================*/
9074size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9075{
9076 size_t max_jpeg_size = 0;
9077 size_t temp_width, temp_height;
9078 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9079 MAX_SIZES_CNT);
9080 for (size_t i = 0; i < count; i++) {
9081 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9082 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9083 if (temp_width * temp_height > max_jpeg_size ) {
9084 max_jpeg_size = temp_width * temp_height;
9085 }
9086 }
9087 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9088 return max_jpeg_size;
9089}
9090
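/* Editor's note: worked example with assumed numbers (not taken from any real
 * capability table). For a hypothetical sensor whose largest picture size is
 * 4160x3120:
 *
 *   max_jpeg_size = 4160 * 3120 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *                 = 19,468,800 bytes + the blob transport header
 *
 * i.e. the JPEG buffer is sized at 1.5 bytes per pixel of the largest picture
 * size plus room for the camera3_jpeg_blob_t trailer.
 */
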
9091/*===========================================================================
9092 * FUNCTION : getMaxRawSize
9093 *
9094 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9095 *
9096 * PARAMETERS :
9097 * @camera_id : camera Id
9098 * RETURN : Largest supported Raw Dimension
9099 *==========================================================================*/
9100cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9101{
9102 int max_width = 0;
9103 cam_dimension_t maxRawSize;
9104
9105 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9106 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9107 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9108 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9109 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9110 }
9111 }
9112 return maxRawSize;
9113}
9114
9115
9116/*===========================================================================
9117 * FUNCTION : calcMaxJpegDim
9118 *
9119 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9120 *
9121 * PARAMETERS :
9122 *
9123 * RETURN : max_jpeg_dim
9124 *==========================================================================*/
9125cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9126{
9127 cam_dimension_t max_jpeg_dim;
9128 cam_dimension_t curr_jpeg_dim;
9129 max_jpeg_dim.width = 0;
9130 max_jpeg_dim.height = 0;
9131 curr_jpeg_dim.width = 0;
9132 curr_jpeg_dim.height = 0;
9133 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9134 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9135 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9136 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9137 max_jpeg_dim.width * max_jpeg_dim.height ) {
9138 max_jpeg_dim.width = curr_jpeg_dim.width;
9139 max_jpeg_dim.height = curr_jpeg_dim.height;
9140 }
9141 }
9142 return max_jpeg_dim;
9143}
9144
9145/*===========================================================================
9146 * FUNCTION : addStreamConfig
9147 *
9148 * DESCRIPTION: adds the stream configuration to the array
9149 *
9150 * PARAMETERS :
9151 * @available_stream_configs : pointer to stream configuration array
9152 * @scalar_format : scalar format
9153 * @dim : configuration dimension
9154 * @config_type : input or output configuration type
9155 *
9156 * RETURN : NONE
9157 *==========================================================================*/
9158void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9159 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9160{
9161 available_stream_configs.add(scalar_format);
9162 available_stream_configs.add(dim.width);
9163 available_stream_configs.add(dim.height);
9164 available_stream_configs.add(config_type);
9165}
9166
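/* Editor's note: illustrative sketch of the flattened layout produced by
 * addStreamConfig(). Each call appends one 4-tuple (format, width, height,
 * direction), so the ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS tag ends up
 * looking like the following (sizes are placeholders):
 *
 *   { HAL_PIXEL_FORMAT_BLOB, 4160, 3120,
 *         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
 *     HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
 *         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, ... }
 */
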
9167/*===========================================================================
9168 * FUNCTION : supportBurstCapture
9169 *
9170 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9171 *
9172 * PARAMETERS :
9173 * @cameraId : camera Id
9174 *
9175 * RETURN : true if camera supports BURST_CAPTURE
9176 * false otherwise
9177 *==========================================================================*/
9178bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9179{
9180 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9181 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9182 const int32_t highResWidth = 3264;
9183 const int32_t highResHeight = 2448;
9184
9185 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9186 // Maximum resolution images cannot be captured at >= 10fps
9187 // -> not supporting BURST_CAPTURE
9188 return false;
9189 }
9190
9191 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9192 // Maximum resolution images can be captured at >= 20fps
9193 // --> supporting BURST_CAPTURE
9194 return true;
9195 }
9196
9197 // Find the smallest highRes resolution, or largest resolution if there is none
9198 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9199 MAX_SIZES_CNT);
9200 size_t highRes = 0;
9201 while ((highRes + 1 < totalCnt) &&
9202 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9203 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9204 highResWidth * highResHeight)) {
9205 highRes++;
9206 }
9207 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9208 return true;
9209 } else {
9210 return false;
9211 }
9212}
9213
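/* Editor's note: worked example with assumed capability values. Suppose
 * picture_min_duration[0] (the largest picture size) is 66666666 ns (~15 fps):
 *   - it is <= fullResDurationBound (100 ms), so the camera is not rejected outright;
 *   - it is >  highResDurationBound (50 ms),  so the full-size check alone cannot accept it;
 *   - the loop above then finds the smallest listed size whose pixel count is still
 *     >= 3264x2448 and requires that size to sustain at least 20 fps
 *     (min duration <= 50 ms) before BURST_CAPTURE is advertised.
 */
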
9214/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009215 * FUNCTION : getPDStatIndex
9216 *
9217 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9218 *
9219 * PARAMETERS :
9220 * @caps : camera capabilities
9221 *
9222 * RETURN : int32_t type
9223 * non-negative - on success
9224 * -1 - on failure
9225 *==========================================================================*/
9226int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9227 if (nullptr == caps) {
9228 return -1;
9229 }
9230
9231 uint32_t metaRawCount = caps->meta_raw_channel_count;
9232 int32_t ret = -1;
9233 for (size_t i = 0; i < metaRawCount; i++) {
9234 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9235 ret = i;
9236 break;
9237 }
9238 }
9239
9240 return ret;
9241}
9242
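/* Editor's note: illustrative usage sketch, not part of the build. The returned
 * index selects the PDAF entry inside the meta raw tables, e.g.:
 *
 *   int32_t idx = getPDStatIndex(gCamCapability[cameraId]);
 *   if (idx >= 0) {
 *       cam_dimension_t pdDim = gCamCapability[cameraId]->raw_meta_dim[idx];
 *       // pdDim describes the phase-detection statistics buffer
 *   }
 */
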
9243/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009244 * FUNCTION : initStaticMetadata
9245 *
9246 * DESCRIPTION: initialize the static metadata
9247 *
9248 * PARAMETERS :
9249 * @cameraId : camera Id
9250 *
9251 * RETURN : int32_t type of status
9252 * 0 -- success
9253 * non-zero failure code
9254 *==========================================================================*/
9255int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9256{
9257 int rc = 0;
9258 CameraMetadata staticInfo;
9259 size_t count = 0;
9260 bool limitedDevice = false;
9261 char prop[PROPERTY_VALUE_MAX];
9262 bool supportBurst = false;
9263
9264 supportBurst = supportBurstCapture(cameraId);
9265
9266 /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9267 * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9268 * advertised as a LIMITED device */
9269 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9270 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9271 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9272 !supportBurst;
9273
9274 uint8_t supportedHwLvl = limitedDevice ?
9275 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009276#ifndef USE_HAL_3_3
9277 // LEVEL_3 - This device will support level 3.
9278 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9279#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009280 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009281#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009282
9283 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9284 &supportedHwLvl, 1);
9285
9286 bool facingBack = false;
9287 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9288 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9289 facingBack = true;
9290 }
9291 /*HAL 3 only*/
9292 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9293 &gCamCapability[cameraId]->min_focus_distance, 1);
9294
9295 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9296 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9297
9298 /*should be using focal lengths but sensor doesn't provide that info now*/
9299 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9300 &gCamCapability[cameraId]->focal_length,
9301 1);
9302
9303 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9304 gCamCapability[cameraId]->apertures,
9305 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9306
9307 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9308 gCamCapability[cameraId]->filter_densities,
9309 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9310
9311
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009312 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9313 size_t mode_count =
9314 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9315 for (size_t i = 0; i < mode_count; i++) {
9316 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9317 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009318 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009319 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009320
9321 int32_t lens_shading_map_size[] = {
9322 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9323 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9324 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9325 lens_shading_map_size,
9326 sizeof(lens_shading_map_size)/sizeof(int32_t));
9327
9328 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9329 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9330
9331 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9332 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9333
9334 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9335 &gCamCapability[cameraId]->max_frame_duration, 1);
9336
9337 camera_metadata_rational baseGainFactor = {
9338 gCamCapability[cameraId]->base_gain_factor.numerator,
9339 gCamCapability[cameraId]->base_gain_factor.denominator};
9340 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9341 &baseGainFactor, 1);
9342
9343 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9344 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9345
9346 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9347 gCamCapability[cameraId]->pixel_array_size.height};
9348 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9349 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9350
9351 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9352 gCamCapability[cameraId]->active_array_size.top,
9353 gCamCapability[cameraId]->active_array_size.width,
9354 gCamCapability[cameraId]->active_array_size.height};
9355 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9356 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9357
9358 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9359 &gCamCapability[cameraId]->white_level, 1);
9360
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009361 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9362 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9363 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009364 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009365 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009366
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009367#ifndef USE_HAL_3_3
9368 bool hasBlackRegions = false;
9369 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9370 LOGW("black_region_count: %d is bounded to %d",
9371 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9372 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9373 }
9374 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9375 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9376 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9377 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9378 }
9379 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9380 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9381 hasBlackRegions = true;
9382 }
9383#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009384 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9385 &gCamCapability[cameraId]->flash_charge_duration, 1);
9386
9387 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9388 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9389
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009390 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9391 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9392 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009393 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9394 &timestampSource, 1);
9395
Thierry Strudel54dc9782017-02-15 12:12:10 -08009396 //update histogram vendor data
9397 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009398 &gCamCapability[cameraId]->histogram_size, 1);
9399
Thierry Strudel54dc9782017-02-15 12:12:10 -08009400 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009401 &gCamCapability[cameraId]->max_histogram_count, 1);
9402
Shuzhen Wang14415f52016-11-16 18:26:18 -08009403 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9404 //so that the app can request fewer bins than the maximum supported.
9405 std::vector<int32_t> histBins;
9406 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9407 histBins.push_back(maxHistBins);
9408 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9409 (maxHistBins & 0x1) == 0) {
9410 histBins.push_back(maxHistBins >> 1);
9411 maxHistBins >>= 1;
9412 }
9413 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9414 histBins.data(), histBins.size());
9415
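/* Editor's note: worked example with assumed values. If max_histogram_count were
 * 256 and MIN_CAM_HISTOGRAM_STATS_SIZE were 32, the loop above would publish
 * histBins = {256, 128, 64, 32}; halving stops once the next value would drop
 * below the minimum supported bin count or the current value becomes odd.
 */
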
Thierry Strudel3d639192016-09-09 11:52:26 -07009416 int32_t sharpness_map_size[] = {
9417 gCamCapability[cameraId]->sharpness_map_size.width,
9418 gCamCapability[cameraId]->sharpness_map_size.height};
9419
9420 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9421 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9422
9423 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9424 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9425
Emilian Peev0f3c3162017-03-15 12:57:46 +00009426 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9427 if (0 <= indexPD) {
9428 // Advertise PD stats data as part of the Depth capabilities
9429 int32_t depthWidth =
9430 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9431 int32_t depthHeight =
9432 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009433 int32_t depthStride =
9434 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009435 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9436 assert(0 < depthSamplesCount);
9437 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9438 &depthSamplesCount, 1);
9439
9440 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9441 depthHeight,
9442 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9443 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9444 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9445 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9446 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9447
9448 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9449 depthHeight, 33333333,
9450 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9451 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9452 depthMinDuration,
9453 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9454
9455 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9456 depthHeight, 0,
9457 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9458 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9459 depthStallDuration,
9460 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9461
9462 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9463 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009464
9465 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9466 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9467 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009468 }
9469
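/* Editor's note: worked example with an assumed PDAF dimension. For a hypothetical
 * raw_meta_dim of 496x496:
 *
 *   depthStride       = 496 * 2              = 992 bytes per row (16-bit samples)
 *   depthSamplesCount = (496 * 496 * 2) / 16 = 30752
 *
 * so the depth tags above advertise both a RAW16 496x496 stream and a BLOB
 * "point cloud" stream of depthSamplesCount samples.
 */
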
Thierry Strudel3d639192016-09-09 11:52:26 -07009470 int32_t scalar_formats[] = {
9471 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9472 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9473 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9474 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9475 HAL_PIXEL_FORMAT_RAW10,
9476 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009477 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9478 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9479 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009480
9481 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9482 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9483 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9484 count, MAX_SIZES_CNT, available_processed_sizes);
9485 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9486 available_processed_sizes, count * 2);
9487
9488 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9489 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9490 makeTable(gCamCapability[cameraId]->raw_dim,
9491 count, MAX_SIZES_CNT, available_raw_sizes);
9492 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9493 available_raw_sizes, count * 2);
9494
9495 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9496 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9497 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9498 count, MAX_SIZES_CNT, available_fps_ranges);
9499 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9500 available_fps_ranges, count * 2);
9501
9502 camera_metadata_rational exposureCompensationStep = {
9503 gCamCapability[cameraId]->exp_compensation_step.numerator,
9504 gCamCapability[cameraId]->exp_compensation_step.denominator};
9505 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9506 &exposureCompensationStep, 1);
9507
9508 Vector<uint8_t> availableVstabModes;
9509 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9510 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009511 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009512 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009513 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009514 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009515 count = IS_TYPE_MAX;
9516 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9517 for (size_t i = 0; i < count; i++) {
9518 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9519 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9520 eisSupported = true;
9521 break;
9522 }
9523 }
9524 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009525 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9526 }
9527 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9528 availableVstabModes.array(), availableVstabModes.size());
9529
9530 /*HAL 1 and HAL 3 common*/
9531 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9532 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9533 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009534 // Cap the max zoom to the max preferred value
9535 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009536 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9537 &maxZoom, 1);
9538
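/* Editor's note: worked example with assumed table values. If the last entry of
 * zoom_ratio_tbl were 800 (8x in steps of 100) then, with minZoomStep = 100:
 *
 *   maxZoom = MIN(800 / 100, MAX_PREFERRED_ZOOM_RATIO)
 *
 * i.e. the advertised ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM is the table
 * maximum capped at the preferred limit.
 */
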
9539 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9540 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9541
9542 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9543 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9544 max3aRegions[2] = 0; /* AF not supported */
9545 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9546 max3aRegions, 3);
9547
9548 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9549 memset(prop, 0, sizeof(prop));
9550 property_get("persist.camera.facedetect", prop, "1");
9551 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9552 LOGD("Support face detection mode: %d",
9553 supportedFaceDetectMode);
9554
9555 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9556 /* supported mode should be OFF if the max number of faces is 0 */
9557 if (maxFaces <= 0) {
9558 supportedFaceDetectMode = 0;
9559 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009560 Vector<uint8_t> availableFaceDetectModes;
9561 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9562 if (supportedFaceDetectMode == 1) {
9563 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9564 } else if (supportedFaceDetectMode == 2) {
9565 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9566 } else if (supportedFaceDetectMode == 3) {
9567 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9568 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9569 } else {
9570 maxFaces = 0;
9571 }
9572 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9573 availableFaceDetectModes.array(),
9574 availableFaceDetectModes.size());
9575 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9576 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009577 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9578 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9579 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009580
9581 int32_t exposureCompensationRange[] = {
9582 gCamCapability[cameraId]->exposure_compensation_min,
9583 gCamCapability[cameraId]->exposure_compensation_max};
9584 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9585 exposureCompensationRange,
9586 sizeof(exposureCompensationRange)/sizeof(int32_t));
9587
9588 uint8_t lensFacing = (facingBack) ?
9589 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9590 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9591
9592 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9593 available_thumbnail_sizes,
9594 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9595
9596 /*all sizes will be clubbed into this tag*/
9597 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9598 /*android.scaler.availableStreamConfigurations*/
9599 Vector<int32_t> available_stream_configs;
9600 cam_dimension_t active_array_dim;
9601 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9602 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009603
9604 /* Advertise the list of supported input dimensions based on the property below.
9605 By default all sizes up to 5MP will be advertised.
9606 Note that the setprop resolution format should be WxH.
9607 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9608 To list all supported sizes, setprop needs to be set with "0x0" */
9609 cam_dimension_t minInputSize = {2592,1944}; //5MP
9610 memset(prop, 0, sizeof(prop));
9611 property_get("persist.camera.input.minsize", prop, "2592x1944");
9612 if (strlen(prop) > 0) {
9613 char *saveptr = NULL;
9614 char *token = strtok_r(prop, "x", &saveptr);
9615 if (token != NULL) {
9616 minInputSize.width = atoi(token);
9617 }
9618 token = strtok_r(NULL, "x", &saveptr);
9619 if (token != NULL) {
9620 minInputSize.height = atoi(token);
9621 }
9622 }
9623
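/* Editor's note: illustrative example of the parsing above (assumed setprop value).
 * With "adb shell setprop persist.camera.input.minsize 1280x720", strtok_r() splits
 * the string on 'x' so minInputSize becomes {1280, 720}; the special value "0x0"
 * parses to {0, 0}, which effectively disables the minimum-size check for input
 * stream configurations.
 */
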
Thierry Strudel3d639192016-09-09 11:52:26 -07009624 /* Add input/output stream configurations for each scalar formats*/
9625 for (size_t j = 0; j < scalar_formats_count; j++) {
9626 switch (scalar_formats[j]) {
9627 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9628 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9629 case HAL_PIXEL_FORMAT_RAW10:
9630 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9631 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9632 addStreamConfig(available_stream_configs, scalar_formats[j],
9633 gCamCapability[cameraId]->raw_dim[i],
9634 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9635 }
9636 break;
9637 case HAL_PIXEL_FORMAT_BLOB:
9638 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9639 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9640 addStreamConfig(available_stream_configs, scalar_formats[j],
9641 gCamCapability[cameraId]->picture_sizes_tbl[i],
9642 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9643 }
9644 break;
9645 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9646 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9647 default:
9648 cam_dimension_t largest_picture_size;
9649 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9650 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9651 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9652 addStreamConfig(available_stream_configs, scalar_formats[j],
9653 gCamCapability[cameraId]->picture_sizes_tbl[i],
9654 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9655 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009656 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9657 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009658 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9659 >= minInputSize.width) || (gCamCapability[cameraId]->
9660 picture_sizes_tbl[i].height >= minInputSize.height)) {
9661 addStreamConfig(available_stream_configs, scalar_formats[j],
9662 gCamCapability[cameraId]->picture_sizes_tbl[i],
9663 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9664 }
9665 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009666 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009667
Thierry Strudel3d639192016-09-09 11:52:26 -07009668 break;
9669 }
9670 }
9671
9672 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9673 available_stream_configs.array(), available_stream_configs.size());
9674 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9675 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9676
9677 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9678 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9679
9680 /* android.scaler.availableMinFrameDurations */
9681 Vector<int64_t> available_min_durations;
9682 for (size_t j = 0; j < scalar_formats_count; j++) {
9683 switch (scalar_formats[j]) {
9684 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9685 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9686 case HAL_PIXEL_FORMAT_RAW10:
9687 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9688 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9689 available_min_durations.add(scalar_formats[j]);
9690 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9691 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9692 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9693 }
9694 break;
9695 default:
9696 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9697 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9698 available_min_durations.add(scalar_formats[j]);
9699 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9700 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9701 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9702 }
9703 break;
9704 }
9705 }
9706 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9707 available_min_durations.array(), available_min_durations.size());
9708
9709 Vector<int32_t> available_hfr_configs;
9710 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9711 int32_t fps = 0;
9712 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9713 case CAM_HFR_MODE_60FPS:
9714 fps = 60;
9715 break;
9716 case CAM_HFR_MODE_90FPS:
9717 fps = 90;
9718 break;
9719 case CAM_HFR_MODE_120FPS:
9720 fps = 120;
9721 break;
9722 case CAM_HFR_MODE_150FPS:
9723 fps = 150;
9724 break;
9725 case CAM_HFR_MODE_180FPS:
9726 fps = 180;
9727 break;
9728 case CAM_HFR_MODE_210FPS:
9729 fps = 210;
9730 break;
9731 case CAM_HFR_MODE_240FPS:
9732 fps = 240;
9733 break;
9734 case CAM_HFR_MODE_480FPS:
9735 fps = 480;
9736 break;
9737 case CAM_HFR_MODE_OFF:
9738 case CAM_HFR_MODE_MAX:
9739 default:
9740 break;
9741 }
9742
9743 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9744 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9745 /* For each HFR frame rate, need to advertise one variable fps range
9746 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9747 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9748 * set by the app. When video recording is started, [120, 120] is
9749 * set. This way sensor configuration does not change when recording
9750 * is started */
9751
9752 /* (width, height, fps_min, fps_max, batch_size_max) */
9753 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9754 j < MAX_SIZES_CNT; j++) {
9755 available_hfr_configs.add(
9756 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9757 available_hfr_configs.add(
9758 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9759 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9760 available_hfr_configs.add(fps);
9761 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9762
9763 /* (width, height, fps_min, fps_max, batch_size_max) */
9764 available_hfr_configs.add(
9765 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9766 available_hfr_configs.add(
9767 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9768 available_hfr_configs.add(fps);
9769 available_hfr_configs.add(fps);
9770 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9771 }
9772 }
9773 }
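/* Editor's note: illustrative sketch of the tuples appended above, assuming a
 * 1920x1080 entry at 120 fps and PREVIEW_FPS_FOR_HFR of 30:
 *
 *   {1920, 1080,  30, 120, 4}   // variable range while only preview runs
 *   {1920, 1080, 120, 120, 4}   // fixed range once recording starts
 *
 * where the last element, batch_size_max, is fps / PREVIEW_FPS_FOR_HFR.
 */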
9774 //Advertise HFR capability only if the property is set
9775 memset(prop, 0, sizeof(prop));
9776 property_get("persist.camera.hal3hfr.enable", prop, "1");
9777 uint8_t hfrEnable = (uint8_t)atoi(prop);
9778
9779 if(hfrEnable && available_hfr_configs.array()) {
9780 staticInfo.update(
9781 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9782 available_hfr_configs.array(), available_hfr_configs.size());
9783 }
9784
9785 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9786 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9787 &max_jpeg_size, 1);
9788
9789 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9790 size_t size = 0;
9791 count = CAM_EFFECT_MODE_MAX;
9792 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9793 for (size_t i = 0; i < count; i++) {
9794 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9795 gCamCapability[cameraId]->supported_effects[i]);
9796 if (NAME_NOT_FOUND != val) {
9797 avail_effects[size] = (uint8_t)val;
9798 size++;
9799 }
9800 }
9801 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9802 avail_effects,
9803 size);
9804
9805 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9806 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9807 size_t supported_scene_modes_cnt = 0;
9808 count = CAM_SCENE_MODE_MAX;
9809 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9810 for (size_t i = 0; i < count; i++) {
9811 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9812 CAM_SCENE_MODE_OFF) {
9813 int val = lookupFwkName(SCENE_MODES_MAP,
9814 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9815 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009816
Thierry Strudel3d639192016-09-09 11:52:26 -07009817 if (NAME_NOT_FOUND != val) {
9818 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9819 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9820 supported_scene_modes_cnt++;
9821 }
9822 }
9823 }
9824 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9825 avail_scene_modes,
9826 supported_scene_modes_cnt);
9827
9828 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9829 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9830 supported_scene_modes_cnt,
9831 CAM_SCENE_MODE_MAX,
9832 scene_mode_overrides,
9833 supported_indexes,
9834 cameraId);
9835
9836 if (supported_scene_modes_cnt == 0) {
9837 supported_scene_modes_cnt = 1;
9838 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9839 }
9840
9841 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9842 scene_mode_overrides, supported_scene_modes_cnt * 3);
9843
9844 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9845 ANDROID_CONTROL_MODE_AUTO,
9846 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9847 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9848 available_control_modes,
9849 3);
9850
9851 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9852 size = 0;
9853 count = CAM_ANTIBANDING_MODE_MAX;
9854 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9855 for (size_t i = 0; i < count; i++) {
9856 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9857 gCamCapability[cameraId]->supported_antibandings[i]);
9858 if (NAME_NOT_FOUND != val) {
9859 avail_antibanding_modes[size] = (uint8_t)val;
9860 size++;
9861 }
9862
9863 }
9864 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9865 avail_antibanding_modes,
9866 size);
9867
9868 uint8_t avail_abberation_modes[] = {
9869 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9870 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9871 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9872 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9873 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9874 if (0 == count) {
9875 // If no aberration correction modes are available for a device, advertise only the OFF mode
9876 size = 1;
9877 } else {
9878 // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
9879 // So, advertise all 3 modes if at least one mode is supported, as per the
9880 // Android M requirement.
9881 size = 3;
9882 }
9883 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9884 avail_abberation_modes,
9885 size);
9886
9887 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9888 size = 0;
9889 count = CAM_FOCUS_MODE_MAX;
9890 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9891 for (size_t i = 0; i < count; i++) {
9892 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9893 gCamCapability[cameraId]->supported_focus_modes[i]);
9894 if (NAME_NOT_FOUND != val) {
9895 avail_af_modes[size] = (uint8_t)val;
9896 size++;
9897 }
9898 }
9899 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9900 avail_af_modes,
9901 size);
9902
9903 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9904 size = 0;
9905 count = CAM_WB_MODE_MAX;
9906 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9907 for (size_t i = 0; i < count; i++) {
9908 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9909 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9910 gCamCapability[cameraId]->supported_white_balances[i]);
9911 if (NAME_NOT_FOUND != val) {
9912 avail_awb_modes[size] = (uint8_t)val;
9913 size++;
9914 }
9915 }
9916 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9917 avail_awb_modes,
9918 size);
9919
9920 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9921 count = CAM_FLASH_FIRING_LEVEL_MAX;
9922 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9923 count);
9924 for (size_t i = 0; i < count; i++) {
9925 available_flash_levels[i] =
9926 gCamCapability[cameraId]->supported_firing_levels[i];
9927 }
9928 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9929 available_flash_levels, count);
9930
9931 uint8_t flashAvailable;
9932 if (gCamCapability[cameraId]->flash_available)
9933 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9934 else
9935 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9936 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9937 &flashAvailable, 1);
9938
9939 Vector<uint8_t> avail_ae_modes;
9940 count = CAM_AE_MODE_MAX;
9941 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9942 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009943 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9944 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9945 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9946 }
9947 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009948 }
9949 if (flashAvailable) {
9950 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9951 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9952 }
9953 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9954 avail_ae_modes.array(),
9955 avail_ae_modes.size());
9956
9957 int32_t sensitivity_range[2];
9958 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9959 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9960 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9961 sensitivity_range,
9962 sizeof(sensitivity_range) / sizeof(int32_t));
9963
9964 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9965 &gCamCapability[cameraId]->max_analog_sensitivity,
9966 1);
9967
9968 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9969 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9970 &sensor_orientation,
9971 1);
9972
9973 int32_t max_output_streams[] = {
9974 MAX_STALLING_STREAMS,
9975 MAX_PROCESSED_STREAMS,
9976 MAX_RAW_STREAMS};
9977 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9978 max_output_streams,
9979 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9980
9981 uint8_t avail_leds = 0;
9982 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9983 &avail_leds, 0);
9984
9985 uint8_t focus_dist_calibrated;
9986 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9987 gCamCapability[cameraId]->focus_dist_calibrated);
9988 if (NAME_NOT_FOUND != val) {
9989 focus_dist_calibrated = (uint8_t)val;
9990 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9991 &focus_dist_calibrated, 1);
9992 }
9993
9994 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9995 size = 0;
9996 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9997 MAX_TEST_PATTERN_CNT);
9998 for (size_t i = 0; i < count; i++) {
9999 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10000 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10001 if (NAME_NOT_FOUND != testpatternMode) {
10002 avail_testpattern_modes[size] = testpatternMode;
10003 size++;
10004 }
10005 }
10006 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10007 avail_testpattern_modes,
10008 size);
10009
10010 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10011 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10012 &max_pipeline_depth,
10013 1);
10014
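/* Editor's note: worked example with assumed constants. If MAX_INFLIGHT_REQUESTS
 * were 6, EMPTY_PIPELINE_DELAY were 2 and FRAME_SKIP_DELAY were 0, then:
 *
 *   max_pipeline_depth = 6 + 2 + 0 = 8
 *
 * which is the value the framework would see as ANDROID_REQUEST_PIPELINE_MAX_DEPTH.
 */
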
10015 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10016 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10017 &partial_result_count,
10018 1);
10019
10020 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10021 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10022
10023 Vector<uint8_t> available_capabilities;
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10025 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10026 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10027 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10028 if (supportBurst) {
10029 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10030 }
10031 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10032 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10033 if (hfrEnable && available_hfr_configs.array()) {
10034 available_capabilities.add(
10035 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10036 }
10037
10038 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10039 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10040 }
10041 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10042 available_capabilities.array(),
10043 available_capabilities.size());
10044
10045 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10046 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10047 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10048 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10049
10050 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10051 &aeLockAvailable, 1);
10052
10053 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10054 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10055 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10056 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10057
10058 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10059 &awbLockAvailable, 1);
10060
10061 int32_t max_input_streams = 1;
10062 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10063 &max_input_streams,
10064 1);
10065
10066 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10067 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10068 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10069 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10070 HAL_PIXEL_FORMAT_YCbCr_420_888};
10071 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10072 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10073
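/* Editor's note: illustrative reading of io_format_map above. The flattened layout
 * is (input format, number of output formats, output formats...), so the two
 * entries say:
 *
 *   IMPLEMENTATION_DEFINED -> { BLOB, YCbCr_420_888 }
 *   YCbCr_420_888          -> { BLOB, YCbCr_420_888 }
 *
 * i.e. either reprocessing input can produce a JPEG (BLOB) or a YUV output.
 */
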
10074 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10075 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10076 &max_latency,
10077 1);
10078
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010079#ifndef USE_HAL_3_3
10080 int32_t isp_sensitivity_range[2];
10081 isp_sensitivity_range[0] =
10082 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10083 isp_sensitivity_range[1] =
10084 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10085 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10086 isp_sensitivity_range,
10087 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10088#endif
10089
Thierry Strudel3d639192016-09-09 11:52:26 -070010090 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10091 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10092 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10093 available_hot_pixel_modes,
10094 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10095
10096 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10097 ANDROID_SHADING_MODE_FAST,
10098 ANDROID_SHADING_MODE_HIGH_QUALITY};
10099 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10100 available_shading_modes,
10101 3);
10102
10103 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10104 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10105 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10106 available_lens_shading_map_modes,
10107 2);
10108
10109 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10110 ANDROID_EDGE_MODE_FAST,
10111 ANDROID_EDGE_MODE_HIGH_QUALITY,
10112 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10113 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10114 available_edge_modes,
10115 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10116
10117 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10118 ANDROID_NOISE_REDUCTION_MODE_FAST,
10119 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10120 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10121 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10122 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10123 available_noise_red_modes,
10124 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10125
10126 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10127 ANDROID_TONEMAP_MODE_FAST,
10128 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10129 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10130 available_tonemap_modes,
10131 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10132
10133 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10134 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10135 available_hot_pixel_map_modes,
10136 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10137
10138 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10139 gCamCapability[cameraId]->reference_illuminant1);
10140 if (NAME_NOT_FOUND != val) {
10141 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10142 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10143 }
10144
10145 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10146 gCamCapability[cameraId]->reference_illuminant2);
10147 if (NAME_NOT_FOUND != val) {
10148 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10149 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10150 }
10151
10152 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10153 (void *)gCamCapability[cameraId]->forward_matrix1,
10154 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10155
10156 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10157 (void *)gCamCapability[cameraId]->forward_matrix2,
10158 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10159
10160 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10161 (void *)gCamCapability[cameraId]->color_transform1,
10162 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10163
10164 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10165 (void *)gCamCapability[cameraId]->color_transform2,
10166 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10167
10168 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10169 (void *)gCamCapability[cameraId]->calibration_transform1,
10170 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10171
10172 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10173 (void *)gCamCapability[cameraId]->calibration_transform2,
10174 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10175
10176 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10177 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10178 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10179 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10180 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10181 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10182 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10183 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10184 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10185 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10186 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10187 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10188 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10189 ANDROID_JPEG_GPS_COORDINATES,
10190 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10191 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10192 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10193 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10194 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10195 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10196 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10197 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10198 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10199 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010200#ifndef USE_HAL_3_3
10201 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10202#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010203 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010204 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010205 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10206 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010207 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010208 /* DevCamDebug metadata request_keys_basic */
10209 DEVCAMDEBUG_META_ENABLE,
10210 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010211 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010212 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010213 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010214 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010215 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010216 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010217
10218 size_t request_keys_cnt =
10219 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10220 Vector<int32_t> available_request_keys;
10221 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10222 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10223 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10224 }
10225
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010226 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010227 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010228 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010229 }
10230
Thierry Strudel3d639192016-09-09 11:52:26 -070010231 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10232 available_request_keys.array(), available_request_keys.size());
10233
10234 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10235 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10236 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10237 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10238 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10239 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10240 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10241 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10242 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10243 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10244 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10245 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10246 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10247 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10248 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10249 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10250 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010251 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010252 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10253 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10254 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010255 ANDROID_STATISTICS_FACE_SCORES,
10256#ifndef USE_HAL_3_3
10257 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10258#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010259 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010260 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010261 // DevCamDebug metadata result_keys_basic
10262 DEVCAMDEBUG_META_ENABLE,
10263 // DevCamDebug metadata result_keys AF
10264 DEVCAMDEBUG_AF_LENS_POSITION,
10265 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10266 DEVCAMDEBUG_AF_TOF_DISTANCE,
10267 DEVCAMDEBUG_AF_LUMA,
10268 DEVCAMDEBUG_AF_HAF_STATE,
10269 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10270 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10271 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10272 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10273 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10274 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10275 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10276 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10277 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10278 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10279 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10280 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10281 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10282 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10283 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10284 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10285 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10286 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10287 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10288 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10289 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10290 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10291 // DevCamDebug metadata result_keys AEC
10292 DEVCAMDEBUG_AEC_TARGET_LUMA,
10293 DEVCAMDEBUG_AEC_COMP_LUMA,
10294 DEVCAMDEBUG_AEC_AVG_LUMA,
10295 DEVCAMDEBUG_AEC_CUR_LUMA,
10296 DEVCAMDEBUG_AEC_LINECOUNT,
10297 DEVCAMDEBUG_AEC_REAL_GAIN,
10298 DEVCAMDEBUG_AEC_EXP_INDEX,
10299 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010300 // DevCamDebug metadata result_keys zzHDR
10301 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10302 DEVCAMDEBUG_AEC_L_LINECOUNT,
10303 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10304 DEVCAMDEBUG_AEC_S_LINECOUNT,
10305 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10306 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10307 // DevCamDebug metadata result_keys ADRC
10308 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10309 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10310 DEVCAMDEBUG_AEC_GTM_RATIO,
10311 DEVCAMDEBUG_AEC_LTM_RATIO,
10312 DEVCAMDEBUG_AEC_LA_RATIO,
10313 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010314 // DevCamDebug metadata result_keys AEC MOTION
10315 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10316 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10317 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010318 // DevCamDebug metadata result_keys AWB
10319 DEVCAMDEBUG_AWB_R_GAIN,
10320 DEVCAMDEBUG_AWB_G_GAIN,
10321 DEVCAMDEBUG_AWB_B_GAIN,
10322 DEVCAMDEBUG_AWB_CCT,
10323 DEVCAMDEBUG_AWB_DECISION,
10324 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010325 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10326 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10327 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010328 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010329 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010330 };
10331
Thierry Strudel3d639192016-09-09 11:52:26 -070010332 size_t result_keys_cnt =
10333 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10334
10335 Vector<int32_t> available_result_keys;
10336 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10337 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10338 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10339 }
10340 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10341 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10342 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10343 }
10344 if (supportedFaceDetectMode == 1) {
10345 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10346 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10347 } else if ((supportedFaceDetectMode == 2) ||
10348 (supportedFaceDetectMode == 3)) {
10349 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10350 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10351 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010352#ifndef USE_HAL_3_3
10353 if (hasBlackRegions) {
10354 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10355 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10356 }
10357#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010358
10359 if (gExposeEnableZslKey) {
10360 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010361 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010362 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10363 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010364 }
10365
Thierry Strudel3d639192016-09-09 11:52:26 -070010366 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10367 available_result_keys.array(), available_result_keys.size());
10368
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010369 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010370 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10371 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10372 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10373 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10374 ANDROID_SCALER_CROPPING_TYPE,
10375 ANDROID_SYNC_MAX_LATENCY,
10376 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10377 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10378 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10379 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10380 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10381 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10382 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10383 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10384 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10385 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10386 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10387 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10388 ANDROID_LENS_FACING,
10389 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10390 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10391 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10392 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10393 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10394 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10395 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10396 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10397 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10398 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10399 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10400 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10401 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10402 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10403 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10404 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10405 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10406 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10407 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10408 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010409 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010410 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10411 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10412 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10413 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10414 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10415 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10416 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10417 ANDROID_CONTROL_AVAILABLE_MODES,
10418 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10419 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10420 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10421 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010422 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10423#ifndef USE_HAL_3_3
10424 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10425 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10426#endif
10427 };
10428
10429 Vector<int32_t> available_characteristics_keys;
10430 available_characteristics_keys.appendArray(characteristics_keys_basic,
10431 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10432#ifndef USE_HAL_3_3
10433 if (hasBlackRegions) {
10434 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10435 }
10436#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010437
10438 if (0 <= indexPD) {
10439 int32_t depthKeys[] = {
10440 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10441 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10442 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10443 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10444 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10445 };
10446 available_characteristics_keys.appendArray(depthKeys,
10447 sizeof(depthKeys) / sizeof(depthKeys[0]));
10448 }
10449
Thierry Strudel3d639192016-09-09 11:52:26 -070010450 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010451 available_characteristics_keys.array(),
10452 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010453
10454 /*available stall durations depend on the hw + sw and will be different for different devices */
10455 /*have to add for raw after implementation*/
10456 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10457 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10458
10459 Vector<int64_t> available_stall_durations;
10460 for (uint32_t j = 0; j < stall_formats_count; j++) {
10461 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10462 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10463 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10464 available_stall_durations.add(stall_formats[j]);
10465 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10466 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10467 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10468 }
10469 } else {
10470 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10471 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10472 available_stall_durations.add(stall_formats[j]);
10473 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10474 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10475 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10476 }
10477 }
10478 }
10479 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10480 available_stall_durations.array(),
10481 available_stall_durations.size());
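    // Editorial note: the vector built above is flattened into 4-tuples of
    // (format, width, height, stall duration in ns), one tuple per supported size; e.g. a
    // 12 MP JPEG entry might look like {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333}.
    // The concrete sizes and durations come from gCamCapability, so these values are
    // illustrative only.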
10482
10483 //QCAMERA3_OPAQUE_RAW
10484 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10485 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10486 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10487 case LEGACY_RAW:
10488 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10489 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10490 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10491 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10492 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10493 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10494 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10495 break;
10496 case MIPI_RAW:
10497 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10498 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10499 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10500 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10501 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10502 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10503 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10504 break;
10505 default:
10506 LOGE("unknown opaque_raw_format %d",
10507 gCamCapability[cameraId]->opaque_raw_fmt);
10508 break;
10509 }
10510 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10511
10512 Vector<int32_t> strides;
10513 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10514 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10515 cam_stream_buf_plane_info_t buf_planes;
10516 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10517 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10518 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10519 &gCamCapability[cameraId]->padding_info, &buf_planes);
10520 strides.add(buf_planes.plane_info.mp[0].stride);
10521 }
10522 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10523 strides.size());
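    // Editorial note: QCAMERA3_OPAQUE_RAW_STRIDES is likewise flattened into
    // (width, height, stride) triplets, one per supported raw dimension, with the stride
    // taken from plane 0 of the layout computed by mm_stream_calc_offset_raw() above.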
10524
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010525 //TBD: remove the following line once backend advertises zzHDR in feature mask
10526 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010527 //Video HDR default
10528 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10529 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010530 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010531 int32_t vhdr_mode[] = {
10532 QCAMERA3_VIDEO_HDR_MODE_OFF,
10533 QCAMERA3_VIDEO_HDR_MODE_ON};
10534
10535 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10536 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10537 vhdr_mode, vhdr_mode_count);
10538 }
10539
Thierry Strudel3d639192016-09-09 11:52:26 -070010540 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10541 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10542 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10543
10544 uint8_t isMonoOnly =
10545 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10546 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10547 &isMonoOnly, 1);
10548
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010549#ifndef USE_HAL_3_3
10550 Vector<int32_t> opaque_size;
10551 for (size_t j = 0; j < scalar_formats_count; j++) {
10552 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10553 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10554 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10555 cam_stream_buf_plane_info_t buf_planes;
10556
10557 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10558 &gCamCapability[cameraId]->padding_info, &buf_planes);
10559
10560 if (rc == 0) {
10561 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10562 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10563 opaque_size.add(buf_planes.plane_info.frame_len);
10564 } else {
10565 LOGE("raw frame calculation failed!");
10566 }
10567 }
10568 }
10569 }
10570
10571 if ((opaque_size.size() > 0) &&
10572 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10573 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10574 else
10575 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10576#endif
10577
Thierry Strudel04e026f2016-10-10 11:27:36 -070010578 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10579 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10580 size = 0;
10581 count = CAM_IR_MODE_MAX;
10582 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10583 for (size_t i = 0; i < count; i++) {
10584 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10585 gCamCapability[cameraId]->supported_ir_modes[i]);
10586 if (NAME_NOT_FOUND != val) {
10587 avail_ir_modes[size] = (int32_t)val;
10588 size++;
10589 }
10590 }
10591 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10592 avail_ir_modes, size);
10593 }
10594
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010595 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10596 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10597 size = 0;
10598 count = CAM_AEC_CONVERGENCE_MAX;
10599 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10600 for (size_t i = 0; i < count; i++) {
10601 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10602 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10603 if (NAME_NOT_FOUND != val) {
10604 available_instant_aec_modes[size] = (int32_t)val;
10605 size++;
10606 }
10607 }
10608 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10609 available_instant_aec_modes, size);
10610 }
10611
Thierry Strudel54dc9782017-02-15 12:12:10 -080010612 int32_t sharpness_range[] = {
10613 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10614 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10615 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10616
10617 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10618 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10619 size = 0;
10620 count = CAM_BINNING_CORRECTION_MODE_MAX;
10621 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10622 for (size_t i = 0; i < count; i++) {
10623 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10624 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10625 gCamCapability[cameraId]->supported_binning_modes[i]);
10626 if (NAME_NOT_FOUND != val) {
10627 avail_binning_modes[size] = (int32_t)val;
10628 size++;
10629 }
10630 }
10631 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10632 avail_binning_modes, size);
10633 }
10634
10635 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10636 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10637 size = 0;
10638 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10639 for (size_t i = 0; i < count; i++) {
10640 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10641 gCamCapability[cameraId]->supported_aec_modes[i]);
10642 if (NAME_NOT_FOUND != val)
10643 available_aec_modes[size++] = val;
10644 }
10645 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10646 available_aec_modes, size);
10647 }
10648
10649 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10650 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10651 size = 0;
10652 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10653 for (size_t i = 0; i < count; i++) {
10654 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10655 gCamCapability[cameraId]->supported_iso_modes[i]);
10656 if (NAME_NOT_FOUND != val)
10657 available_iso_modes[size++] = val;
10658 }
10659 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10660 available_iso_modes, size);
10661 }
10662
10663 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010664 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010665 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10666 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10667 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10668
10669 int32_t available_saturation_range[4];
10670 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10671 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10672 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10673 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10674 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10675 available_saturation_range, 4);
10676
10677 uint8_t is_hdr_values[2];
10678 is_hdr_values[0] = 0;
10679 is_hdr_values[1] = 1;
10680 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10681 is_hdr_values, 2);
10682
10683 float is_hdr_confidence_range[2];
10684 is_hdr_confidence_range[0] = 0.0;
10685 is_hdr_confidence_range[1] = 1.0;
10686 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10687 is_hdr_confidence_range, 2);
10688
Emilian Peev0a972ef2017-03-16 10:25:53 +000010689 size_t eepromLength = strnlen(
10690 reinterpret_cast<const char *>(
10691 gCamCapability[cameraId]->eeprom_version_info),
10692 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10693 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010694 char easelInfo[] = ",E:N";
10695 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10696 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10697 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010698 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10699 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010700 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010701 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010702 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10703 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10704 }
10705
Thierry Strudel3d639192016-09-09 11:52:26 -070010706 gStaticMetadata[cameraId] = staticInfo.release();
10707 return rc;
10708}
10709
10710/*===========================================================================
10711 * FUNCTION : makeTable
10712 *
10713 * DESCRIPTION: make a table of sizes
10714 *
10715 * PARAMETERS :
10716 *   @dimTable : input dimension table; @size : number of valid entries
10717 *   @max_size : maximum entries to copy; @sizeTable : output array of (width, height) pairs
10718 *==========================================================================*/
10719void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10720 size_t max_size, int32_t *sizeTable)
10721{
10722 size_t j = 0;
10723 if (size > max_size) {
10724 size = max_size;
10725 }
10726 for (size_t i = 0; i < size; i++) {
10727 sizeTable[j] = dimTable[i].width;
10728 sizeTable[j+1] = dimTable[i].height;
10729 j+=2;
10730 }
10731}
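/*
 * Editorial sketch (not part of the original HAL; values are illustrative): shows the
 * flattened layout makeTable() produces, which is the form the ANDROID_SCALER_* size
 * keys consume.
 */
#if 0   // not compiled; minimal usage sketch only
static void makeTableLayoutExample()
{
    cam_dimension_t dims[2];
    dims[0].width = 1920; dims[0].height = 1080;
    dims[1].width = 1280; dims[1].height = 720;

    int32_t flat[4];
    size_t j = 0;
    for (size_t i = 0; i < 2; i++) {   // same flattening makeTable() performs
        flat[j] = dims[i].width;
        flat[j + 1] = dims[i].height;
        j += 2;
    }
    // flat now holds {1920, 1080, 1280, 720}
    (void)flat;
}
#endif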
10732
10733/*===========================================================================
10734 * FUNCTION : makeFPSTable
10735 *
10736 * DESCRIPTION: make a table of fps ranges
10737 *
10738 * PARAMETERS :
10739 *   @fpsTable : input fps ranges; @size : valid entries; @max_size : copy limit; @fpsRangesTable : output (min_fps, max_fps) pairs
10740 *==========================================================================*/
10741void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10742 size_t max_size, int32_t *fpsRangesTable)
10743{
10744 size_t j = 0;
10745 if (size > max_size) {
10746 size = max_size;
10747 }
10748 for (size_t i = 0; i < size; i++) {
10749 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10750 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10751 j+=2;
10752 }
10753}
10754
10755/*===========================================================================
10756 * FUNCTION : makeOverridesList
10757 *
10758 * DESCRIPTION: make a list of scene mode overrides
10759 *
10760 * PARAMETERS :
10761 *   @overridesTable : per-scene-mode overrides from the backend; @size : valid entries; @max_size : copy limit
10762 *   @overridesList : output (aeMode, awbMode, afMode) triplets; @supported_indexes : framework-supported scene mode indexes; @camera_id : camera id
10763 *==========================================================================*/
10764void QCamera3HardwareInterface::makeOverridesList(
10765 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10766 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10767{
10768 /*the daemon gives a list of overrides for all scene modes;
10769 however, we should send the framework only the overrides for
10770 the scene modes it supports*/
10771 size_t j = 0;
10772 if (size > max_size) {
10773 size = max_size;
10774 }
10775 size_t focus_count = CAM_FOCUS_MODE_MAX;
10776 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10777 focus_count);
10778 for (size_t i = 0; i < size; i++) {
10779 bool supt = false;
10780 size_t index = supported_indexes[i];
10781 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10782 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10783 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10784 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10785 overridesTable[index].awb_mode);
10786 if (NAME_NOT_FOUND != val) {
10787 overridesList[j+1] = (uint8_t)val;
10788 }
10789 uint8_t focus_override = overridesTable[index].af_mode;
10790 for (size_t k = 0; k < focus_count; k++) {
10791 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10792 supt = true;
10793 break;
10794 }
10795 }
10796 if (supt) {
10797 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10798 focus_override);
10799 if (NAME_NOT_FOUND != val) {
10800 overridesList[j+2] = (uint8_t)val;
10801 }
10802 } else {
10803 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10804 }
10805 j+=3;
10806 }
10807}
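/*
 * Editorial note: the resulting overridesList is consumed as (aeMode, awbMode, afMode)
 * triplets, one per framework-supported scene mode: AE defaults to ON_AUTO_FLASH when a
 * flash is available (otherwise ON), AWB is translated from the backend override table,
 * and AF falls back to OFF when the backend's override is not a supported focus mode.
 */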
10808
10809/*===========================================================================
10810 * FUNCTION : filterJpegSizes
10811 *
10812 * DESCRIPTION: Returns the supported JPEG sizes, keeping only those processed sizes
10813 * that are no smaller than the active array size divided by the maximum downscale factor
10814 *
10815 * PARAMETERS :
10816 *   @jpegSizes : output array; @processedSizes : input (width, height) pairs; @processedSizesCnt : input count; @maxCount : output capacity; @active_array_size, @downscale_factor : filtering criteria
10817 * RETURN : length of jpegSizes array
10818 *==========================================================================*/
10819
10820size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10821 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10822 uint8_t downscale_factor)
10823{
10824 if (0 == downscale_factor) {
10825 downscale_factor = 1;
10826 }
10827
10828 int32_t min_width = active_array_size.width / downscale_factor;
10829 int32_t min_height = active_array_size.height / downscale_factor;
10830 size_t jpegSizesCnt = 0;
10831 if (processedSizesCnt > maxCount) {
10832 processedSizesCnt = maxCount;
10833 }
10834 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10835 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10836 jpegSizes[jpegSizesCnt] = processedSizes[i];
10837 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10838 jpegSizesCnt += 2;
10839 }
10840 }
10841 return jpegSizesCnt;
10842}
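/*
 * Editorial note: a worked example of the filtering above (illustrative values): with an
 * active array of 4000x3000 and downscale_factor 4, min_width/min_height become 1000/750,
 * so a processed size list of {4000,3000, 1920,1080, 640,480} yields JPEG sizes
 * {4000,3000, 1920,1080}; 640x480 is dropped because it falls below the minimum.
 */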
10843
10844/*===========================================================================
10845 * FUNCTION : computeNoiseModelEntryS
10846 *
10847 * DESCRIPTION: function to map a given sensitivity to the S noise
10848 * model parameters in the DNG noise model.
10849 *
10850 * PARAMETERS : sens : the sensor sensitivity
10851 *
10852 * RETURN : S (sensor amplification) noise
10853 *
10854 *==========================================================================*/
10855double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10856 double s = gCamCapability[mCameraId]->gradient_S * sens +
10857 gCamCapability[mCameraId]->offset_S;
10858 return ((s < 0.0) ? 0.0 : s);
10859}
10860
10861/*===========================================================================
10862 * FUNCTION : computeNoiseModelEntryO
10863 *
10864 * DESCRIPTION: function to map a given sensitivity to the O noise
10865 * model parameters in the DNG noise model.
10866 *
10867 * PARAMETERS : sens : the sensor sensitivity
10868 *
10869 * RETURN : O (sensor readout) noise
10870 *
10871 *==========================================================================*/
10872double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10873 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10874 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10875 1.0 : (1.0 * sens / max_analog_sens);
10876 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10877 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10878 return ((o < 0.0) ? 0.0 : o);
10879}
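/*
 * Editorial note: the S and O values computed by the two functions above populate the
 * per-channel ANDROID_SENSOR_NOISE_PROFILE entries. As documented for that tag, the noise
 * model is N(x) = sqrt(S * x + O), where x is the normalized pixel value and N(x) the noise
 * standard deviation; S tracks the roughly linear analog gain while O grows quadratically
 * with sensitivity and digital gain, matching the formulas above.
 */
#if 0   // not compiled; minimal sketch of evaluating the model (assumes <cmath> for std::sqrt)
static double noiseModelStddevExample(double S, double O, double x /* normalized [0,1] */)
{
    return std::sqrt(S * x + O);
}
#endif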
10880
10881/*===========================================================================
10882 * FUNCTION : getSensorSensitivity
10883 *
10884 * DESCRIPTION: convert iso_mode to an integer value
10885 *
10886 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10887 *
10888 * RETURN : sensitivity supported by sensor
10889 *
10890 *==========================================================================*/
10891int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10892{
10893 int32_t sensitivity;
10894
10895 switch (iso_mode) {
10896 case CAM_ISO_MODE_100:
10897 sensitivity = 100;
10898 break;
10899 case CAM_ISO_MODE_200:
10900 sensitivity = 200;
10901 break;
10902 case CAM_ISO_MODE_400:
10903 sensitivity = 400;
10904 break;
10905 case CAM_ISO_MODE_800:
10906 sensitivity = 800;
10907 break;
10908 case CAM_ISO_MODE_1600:
10909 sensitivity = 1600;
10910 break;
10911 default:
10912 sensitivity = -1;
10913 break;
10914 }
10915 return sensitivity;
10916}
10917
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010918int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010919 if (gEaselManagerClient == nullptr) {
10920 gEaselManagerClient = EaselManagerClient::create();
10921 if (gEaselManagerClient == nullptr) {
10922 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10923 return -ENODEV;
10924 }
10925 }
10926
10927 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010928 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10929 // to connect to Easel.
10930 bool doNotpowerOnEasel =
10931 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10932
10933 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010934 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10935 return OK;
10936 }
10937
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010938 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010939 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010940 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010941 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010942 return res;
10943 }
10944
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010945 EaselManagerClientOpened = true;
10946
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010947 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010948 if (res != OK) {
10949 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10950 }
10951
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010952 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010953 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010954
10955 // Expose enableZsl key only when HDR+ mode is enabled.
10956 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010957 }
10958
10959 return OK;
10960}
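/*
 * Editorial note: initHdrPlusClientLocked() is driven by three properties read above:
 * camera.hdrplus.donotpoweroneasel skips powering Easel even when present (for HDR+ tests),
 * persist.camera.hdrplus.enable selects full HDR+ mode versus Easel bypass-only, and
 * persist.camera.hdrplus.profiling toggles HDR+ profiling. The ANDROID_CONTROL_ENABLE_ZSL
 * key is only exposed when HDR+ mode is enabled (gExposeEnableZslKey = !gEaselBypassOnly).
 */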
10961
Thierry Strudel3d639192016-09-09 11:52:26 -070010962/*===========================================================================
10963 * FUNCTION : getCamInfo
10964 *
10965 * DESCRIPTION: query camera capabilities
10966 *
10967 * PARAMETERS :
10968 * @cameraId : camera Id
10969 * @info : camera info struct to be filled in with camera capabilities
10970 *
10971 * RETURN : int type of status
10972 * NO_ERROR -- success
10973 * non-zero failure code
10974 *==========================================================================*/
10975int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10976 struct camera_info *info)
10977{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010978 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010979 int rc = 0;
10980
10981 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010982
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010983 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010984 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010985 rc = initHdrPlusClientLocked();
10986 if (rc != OK) {
10987 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10988 pthread_mutex_unlock(&gCamLock);
10989 return rc;
10990 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010991 }
10992
Thierry Strudel3d639192016-09-09 11:52:26 -070010993 if (NULL == gCamCapability[cameraId]) {
10994 rc = initCapabilities(cameraId);
10995 if (rc < 0) {
10996 pthread_mutex_unlock(&gCamLock);
10997 return rc;
10998 }
10999 }
11000
11001 if (NULL == gStaticMetadata[cameraId]) {
11002 rc = initStaticMetadata(cameraId);
11003 if (rc < 0) {
11004 pthread_mutex_unlock(&gCamLock);
11005 return rc;
11006 }
11007 }
11008
11009 switch(gCamCapability[cameraId]->position) {
11010 case CAM_POSITION_BACK:
11011 case CAM_POSITION_BACK_AUX:
11012 info->facing = CAMERA_FACING_BACK;
11013 break;
11014
11015 case CAM_POSITION_FRONT:
11016 case CAM_POSITION_FRONT_AUX:
11017 info->facing = CAMERA_FACING_FRONT;
11018 break;
11019
11020 default:
11021 LOGE("Unknown position type %d for camera id:%d",
11022 gCamCapability[cameraId]->position, cameraId);
11023 rc = -1;
11024 break;
11025 }
11026
11027
11028 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011029#ifndef USE_HAL_3_3
11030 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11031#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011032 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011033#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011034 info->static_camera_characteristics = gStaticMetadata[cameraId];
11035
11036 //For now assume both cameras can operate independently.
11037 info->conflicting_devices = NULL;
11038 info->conflicting_devices_length = 0;
11039
11040 //resource cost is 100 * MIN(1.0, m/M),
11041 //where m is throughput requirement with maximum stream configuration
11042 //and M is CPP maximum throughput.
11043 float max_fps = 0.0;
11044 for (uint32_t i = 0;
11045 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11046 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11047 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11048 }
11049 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11050 gCamCapability[cameraId]->active_array_size.width *
11051 gCamCapability[cameraId]->active_array_size.height * max_fps /
11052 gCamCapability[cameraId]->max_pixel_bandwidth;
11053 info->resource_cost = 100 * MIN(1.0, ratio);
11054 LOGI("camera %d resource cost is %d", cameraId,
11055 info->resource_cost);
11056
11057 pthread_mutex_unlock(&gCamLock);
11058 return rc;
11059}
11060
11061/*===========================================================================
11062 * FUNCTION : translateCapabilityToMetadata
11063 *
11064 * DESCRIPTION: translate the capability into camera_metadata_t
11065 *
11066 * PARAMETERS : type of the request
11067 *
11068 *
11069 * RETURN : success: camera_metadata_t*
11070 * failure: NULL
11071 *
11072 *==========================================================================*/
11073camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11074{
11075 if (mDefaultMetadata[type] != NULL) {
11076 return mDefaultMetadata[type];
11077 }
11078 //first time we are handling this request
11079 //fill up the metadata structure using the wrapper class
11080 CameraMetadata settings;
11081 //translate from cam_capability_t to camera_metadata_tag_t
11082 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11083 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11084 int32_t defaultRequestID = 0;
11085 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11086
11087 /* OIS disable */
11088 char ois_prop[PROPERTY_VALUE_MAX];
11089 memset(ois_prop, 0, sizeof(ois_prop));
11090 property_get("persist.camera.ois.disable", ois_prop, "0");
11091 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11092
11093 /* Force video to use OIS */
11094 char videoOisProp[PROPERTY_VALUE_MAX];
11095 memset(videoOisProp, 0, sizeof(videoOisProp));
11096 property_get("persist.camera.ois.video", videoOisProp, "1");
11097 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011098
11099 // Hybrid AE enable/disable
11100 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11101 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11102 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11103 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11104
Thierry Strudel3d639192016-09-09 11:52:26 -070011105 uint8_t controlIntent = 0;
11106 uint8_t focusMode;
11107 uint8_t vsMode;
11108 uint8_t optStabMode;
11109 uint8_t cacMode;
11110 uint8_t edge_mode;
11111 uint8_t noise_red_mode;
11112 uint8_t tonemap_mode;
11113 bool highQualityModeEntryAvailable = FALSE;
11114 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011115 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011116 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11117 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011118 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011119 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011120 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011121
Thierry Strudel3d639192016-09-09 11:52:26 -070011122 switch (type) {
11123 case CAMERA3_TEMPLATE_PREVIEW:
11124 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11125 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11126 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11127 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11128 edge_mode = ANDROID_EDGE_MODE_FAST;
11129 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11130 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11131 break;
11132 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11133 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11134 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11135 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11136 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11137 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11138 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11139 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11140 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11141 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11142 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11143 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11144 highQualityModeEntryAvailable = TRUE;
11145 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11146 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11147 fastModeEntryAvailable = TRUE;
11148 }
11149 }
11150 if (highQualityModeEntryAvailable) {
11151 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11152 } else if (fastModeEntryAvailable) {
11153 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11154 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011155 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11156 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11157 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011158 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011159 break;
11160 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11161 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11162 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11163 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011164 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11165 edge_mode = ANDROID_EDGE_MODE_FAST;
11166 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11167 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11168 if (forceVideoOis)
11169 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11170 break;
11171 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11172 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11173 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11174 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011175 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11176 edge_mode = ANDROID_EDGE_MODE_FAST;
11177 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11178 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11179 if (forceVideoOis)
11180 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11181 break;
11182 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11183 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11184 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11185 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11186 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11187 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11188 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11189 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11190 break;
11191 case CAMERA3_TEMPLATE_MANUAL:
11192 edge_mode = ANDROID_EDGE_MODE_FAST;
11193 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11194 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11195 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11196 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11197 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11198 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11199 break;
11200 default:
11201 edge_mode = ANDROID_EDGE_MODE_FAST;
11202 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11203 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11204 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11205 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11206 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11207 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11208 break;
11209 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011210 // Set CAC to OFF if the underlying device doesn't support it
11211 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11212 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11213 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011214 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11215 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11216 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11217 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11218 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11219 }
11220 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011221 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011222 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011223
11224 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11225 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11226 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11227 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11228 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11229 || ois_disable)
11230 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11231 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011232 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011233
11234 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11235 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11236
11237 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11238 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11239
11240 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11241 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11242
11243 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11244 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11245
11246 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11247 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11248
11249 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11250 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11251
11252 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11253 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11254
11255 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11256 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11257
11258 /*flash*/
11259 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11260 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11261
11262 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11263 settings.update(ANDROID_FLASH_FIRING_POWER,
11264 &flashFiringLevel, 1);
11265
11266 /* lens */
11267 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11268 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11269
11270 if (gCamCapability[mCameraId]->filter_densities_count) {
11271 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11272 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11273 gCamCapability[mCameraId]->filter_densities_count);
11274 }
11275
11276 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11277 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11278
Thierry Strudel3d639192016-09-09 11:52:26 -070011279 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11280 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11281
11282 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11283 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11284
11285 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11286 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11287
11288 /* face detection (default to OFF) */
11289 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11290 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11291
Thierry Strudel54dc9782017-02-15 12:12:10 -080011292 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11293 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011294
11295 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11296 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11297
11298 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11299 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11300
Thierry Strudel3d639192016-09-09 11:52:26 -070011301
11302 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11303 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11304
11305 /* Exposure time (default to the minimum supported exposure time) */
11306 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11307 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11308
11309 /* frame duration */
11310 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11311 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11312
11313 /* sensitivity */
11314 static const int32_t default_sensitivity = 100;
11315 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011316#ifndef USE_HAL_3_3
11317 static const int32_t default_isp_sensitivity =
11318 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11319 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11320#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011321
11322 /*edge mode*/
11323 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11324
11325 /*noise reduction mode*/
11326 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11327
11328 /*color correction mode*/
11329 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11330 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11331
11332 /*tonemap mode*/
11333 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11334
11335 int32_t scaler_crop_region[4];
11336 scaler_crop_region[0] = 0;
11337 scaler_crop_region[1] = 0;
11338 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11339 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11340 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11341
11342 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11343 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11344
11345 /*focus distance*/
11346 float focus_distance = 0.0;
11347 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11348
11349 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011350 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 float max_range = 0.0;
11352 float max_fixed_fps = 0.0;
11353 int32_t fps_range[2] = {0, 0};
11354 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11355 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011356 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11357 TEMPLATE_MAX_PREVIEW_FPS) {
11358 continue;
11359 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011360 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11361 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11362 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11363 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11364 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11365 if (range > max_range) {
11366 fps_range[0] =
11367 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11368 fps_range[1] =
11369 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11370 max_range = range;
11371 }
11372 } else {
11373 if (range < 0.01 && max_fixed_fps <
11374 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11375 fps_range[0] =
11376 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11377 fps_range[1] =
11378 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11379 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11380 }
11381 }
11382 }
11383 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
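    // Editorial note: a worked example of the selection above (illustrative table): with fps
    // ranges {[15,30], [30,30], [7.5,60]}, the [7.5,60] entry is skipped because its max
    // exceeds TEMPLATE_MAX_PREVIEW_FPS; preview/still/ZSL templates pick [15,30] (the widest
    // remaining range), while the video-style templates pick the fixed range [30,30] (the
    // highest fixed fps).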
11384
11385 /*precapture trigger*/
11386 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11387 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11388
11389 /*af trigger*/
11390 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11391 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11392
11393 /* ae & af regions */
11394 int32_t active_region[] = {
11395 gCamCapability[mCameraId]->active_array_size.left,
11396 gCamCapability[mCameraId]->active_array_size.top,
11397 gCamCapability[mCameraId]->active_array_size.left +
11398 gCamCapability[mCameraId]->active_array_size.width,
11399 gCamCapability[mCameraId]->active_array_size.top +
11400 gCamCapability[mCameraId]->active_array_size.height,
11401 0};
11402 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11403 sizeof(active_region) / sizeof(active_region[0]));
11404 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11405 sizeof(active_region) / sizeof(active_region[0]));
11406
11407 /* black level lock */
11408 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11409 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11410
Thierry Strudel3d639192016-09-09 11:52:26 -070011411 //special defaults for manual template
11412 if (type == CAMERA3_TEMPLATE_MANUAL) {
11413 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11414 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11415
11416 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11417 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11418
11419 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11420 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11421
11422 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11423 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11424
11425 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11426 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11427
11428 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11429 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11430 }
11431
11432
11433 /* TNR
11434 * This is where we decide for which capture templates TNR will be enabled.
11435 * TNR is enabled if either the preview or the video stream requires TNR.
11436 * This is not to be confused with linking on a per-stream basis; that decision
11437 * is still made per session and is handled as part of stream configuration.
11438 */
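// Illustrative outcome (descriptive only): when m_bTnrPreview or m_bTnrVideo
// is set, only the VIDEO_RECORD template below ends up with
// QCAMERA3_TEMPORAL_DENOISE_ENABLE = 1; every other template keeps it at 0.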
11439 uint8_t tnr_enable = 0;
11440
11441 if (m_bTnrPreview || m_bTnrVideo) {
11442
11443 switch (type) {
11444 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11445 tnr_enable = 1;
11446 break;
11447
11448 default:
11449 tnr_enable = 0;
11450 break;
11451 }
11452
11453 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11454 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11455 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11456
11457 LOGD("TNR:%d with process plate %d for template:%d",
11458 tnr_enable, tnr_process_type, type);
11459 }
11460
11461 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011462 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011463 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11464
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011465 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011466 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11467
Shuzhen Wang920ea402017-05-03 08:49:39 -070011468 uint8_t related_camera_id = mCameraId;
11469 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011470
11471 /* CDS default */
11472 char prop[PROPERTY_VALUE_MAX];
11473 memset(prop, 0, sizeof(prop));
11474 property_get("persist.camera.CDS", prop, "Auto");
11475 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11476 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11477 if (CAM_CDS_MODE_MAX == cds_mode) {
11478 cds_mode = CAM_CDS_MODE_AUTO;
11479 }
11480
11481 /* Disabling CDS in templates which have TNR enabled*/
11482 if (tnr_enable)
11483 cds_mode = CAM_CDS_MODE_OFF;
11484
11485 int32_t mode = cds_mode;
11486 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011487
Thierry Strudel269c81a2016-10-12 12:13:59 -070011488 /* Manual Convergence AEC Speed is disabled by default*/
11489 float default_aec_speed = 0;
11490 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11491
11492 /* Manual Convergence AWB Speed is disabled by default*/
11493 float default_awb_speed = 0;
11494 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11495
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011496 // Set instant AEC to normal convergence by default
11497 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11498 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11499
Shuzhen Wang19463d72016-03-08 11:09:52 -080011500 /* hybrid ae */
11501 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11502
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011503 if (gExposeEnableZslKey) {
11504 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011505 int32_t postview = 0;
11506 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011507 }
11508
Thierry Strudel3d639192016-09-09 11:52:26 -070011509 mDefaultMetadata[type] = settings.release();
11510
11511 return mDefaultMetadata[type];
11512}
11513
11514/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011515 * FUNCTION : getExpectedFrameDuration
11516 *
11517 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11518 * duration
11519 *
11520 * PARAMETERS :
11521 * @request : request settings
11522 * @frameDuration : The maximum frame duration in nanoseconds
11523 *
11524 * RETURN : None
11525 *==========================================================================*/
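// Illustrative example (values assumed): a manual request carrying
// ANDROID_SENSOR_EXPOSURE_TIME = 50000000 ns (50 ms) and
// ANDROID_SENSOR_FRAME_DURATION = 33333333 ns (~33 ms) would report
// *frameDuration = max(50000000, 33333333) = 50000000 ns.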
11526void QCamera3HardwareInterface::getExpectedFrameDuration(
11527 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11528 if (nullptr == frameDuration) {
11529 return;
11530 }
11531
11532 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11533 find_camera_metadata_ro_entry(request,
11534 ANDROID_SENSOR_EXPOSURE_TIME,
11535 &e);
11536 if (e.count > 0) {
11537 *frameDuration = e.data.i64[0];
11538 }
11539 find_camera_metadata_ro_entry(request,
11540 ANDROID_SENSOR_FRAME_DURATION,
11541 &e);
11542 if (e.count > 0) {
11543 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11544 }
11545}
11546
11547/*===========================================================================
11548 * FUNCTION : calculateMaxExpectedDuration
11549 *
11550 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11551 * current camera settings.
11552 *
11553 * PARAMETERS :
11554 * @request : request settings
11555 *
11556 * RETURN : Expected frame duration in nanoseconds.
11557 *==========================================================================*/
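// Summary of the logic below (descriptive only): manual modes
// (ANDROID_CONTROL_MODE_OFF, or AE mode OFF under CONTROL_MODE AUTO) derive
// the duration from the requested exposure time / frame duration, auto
// exposure derives it from the minimum of the AE target FPS range
// (duration = 1e9 / min_fps), and kDefaultExpectedDuration is returned for
// the remaining cases (e.g. when the relevant entries are absent).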
11558nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11559 const camera_metadata_t *request) {
11560 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11561 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11562 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11563 if (e.count == 0) {
11564 return maxExpectedDuration;
11565 }
11566
11567 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11568 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11569 }
11570
11571 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11572 return maxExpectedDuration;
11573 }
11574
11575 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11576 if (e.count == 0) {
11577 return maxExpectedDuration;
11578 }
11579
11580 switch (e.data.u8[0]) {
11581 case ANDROID_CONTROL_AE_MODE_OFF:
11582 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11583 break;
11584 default:
11585 find_camera_metadata_ro_entry(request,
11586 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11587 &e);
11588 if (e.count > 1) {
11589 maxExpectedDuration = 1e9 / e.data.i32[0]; // FPS range entries are int32; the minimum FPS bounds the longest duration
11590 }
11591 break;
11592 }
11593
11594 return maxExpectedDuration;
11595}
11596
11597/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011598 * FUNCTION : setFrameParameters
11599 *
11600 * DESCRIPTION: set parameters per frame as requested in the metadata from
11601 * framework
11602 *
11603 * PARAMETERS :
11604 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011605 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011606 * @blob_request: Whether this request is a blob request or not
11607 *
11608 * RETURN : success: NO_ERROR
11609 * failure:
11610 *==========================================================================*/
11611int QCamera3HardwareInterface::setFrameParameters(
11612 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011613 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011614 int blob_request,
11615 uint32_t snapshotStreamId)
11616{
11617 /*translate from camera_metadata_t type to parm_type_t*/
11618 int rc = 0;
11619 int32_t hal_version = CAM_HAL_V3;
11620
11621 clear_metadata_buffer(mParameters);
11622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11623 LOGE("Failed to set hal version in the parameters");
11624 return BAD_VALUE;
11625 }
11626
11627 /*we need to update the frame number in the parameters*/
11628 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11629 request->frame_number)) {
11630 LOGE("Failed to set the frame number in the parameters");
11631 return BAD_VALUE;
11632 }
11633
11634 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011636 LOGE("Failed to set stream type mask in the parameters");
11637 return BAD_VALUE;
11638 }
11639
11640 if (mUpdateDebugLevel) {
11641 uint32_t dummyDebugLevel = 0;
11642 /* The value of dummyDebugLevel is irrelevant. On
11643 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
11644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11645 dummyDebugLevel)) {
11646 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11647 return BAD_VALUE;
11648 }
11649 mUpdateDebugLevel = false;
11650 }
11651
11652 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011653 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011654 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11655 if (blob_request)
11656 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11657 }
11658
11659 return rc;
11660}
11661
11662/*===========================================================================
11663 * FUNCTION : setReprocParameters
11664 *
11665 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11666 * return it.
11667 *
11668 * PARAMETERS :
11669 * @request : request that needs to be serviced
11670 *
11671 * RETURN : success: NO_ERROR
11672 * failure:
11673 *==========================================================================*/
11674int32_t QCamera3HardwareInterface::setReprocParameters(
11675 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11676 uint32_t snapshotStreamId)
11677{
11678 /*translate from camera_metadata_t type to parm_type_t*/
11679 int rc = 0;
11680
11681 if (NULL == request->settings){
11682 LOGE("Reprocess settings cannot be NULL");
11683 return BAD_VALUE;
11684 }
11685
11686 if (NULL == reprocParam) {
11687 LOGE("Invalid reprocessing metadata buffer");
11688 return BAD_VALUE;
11689 }
11690 clear_metadata_buffer(reprocParam);
11691
11692 /*we need to update the frame number in the parameters*/
11693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11694 request->frame_number)) {
11695 LOGE("Failed to set the frame number in the parameters");
11696 return BAD_VALUE;
11697 }
11698
11699 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11700 if (rc < 0) {
11701 LOGE("Failed to translate reproc request");
11702 return rc;
11703 }
11704
11705 CameraMetadata frame_settings;
11706 frame_settings = request->settings;
11707 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11708 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11709 int32_t *crop_count =
11710 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11711 int32_t *crop_data =
11712 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11713 int32_t *roi_map =
11714 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11715 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11716 cam_crop_data_t crop_meta;
11717 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11718 crop_meta.num_of_streams = 1;
11719 crop_meta.crop_info[0].crop.left = crop_data[0];
11720 crop_meta.crop_info[0].crop.top = crop_data[1];
11721 crop_meta.crop_info[0].crop.width = crop_data[2];
11722 crop_meta.crop_info[0].crop.height = crop_data[3];
11723
11724 crop_meta.crop_info[0].roi_map.left =
11725 roi_map[0];
11726 crop_meta.crop_info[0].roi_map.top =
11727 roi_map[1];
11728 crop_meta.crop_info[0].roi_map.width =
11729 roi_map[2];
11730 crop_meta.crop_info[0].roi_map.height =
11731 roi_map[3];
11732
11733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11734 rc = BAD_VALUE;
11735 }
11736 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11737 request->input_buffer->stream,
11738 crop_meta.crop_info[0].crop.left,
11739 crop_meta.crop_info[0].crop.top,
11740 crop_meta.crop_info[0].crop.width,
11741 crop_meta.crop_info[0].crop.height);
11742 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11743 request->input_buffer->stream,
11744 crop_meta.crop_info[0].roi_map.left,
11745 crop_meta.crop_info[0].roi_map.top,
11746 crop_meta.crop_info[0].roi_map.width,
11747 crop_meta.crop_info[0].roi_map.height);
11748 } else {
11749 LOGE("Invalid reprocess crop count %d!", *crop_count);
11750 }
11751 } else {
11752 LOGE("No crop data from matching output stream");
11753 }
11754
11755 /* These settings are not needed for regular requests so handle them specially for
11756 reprocess requests; information needed for EXIF tags */
11757 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11758 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11759 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11760 if (NAME_NOT_FOUND != val) {
11761 uint32_t flashMode = (uint32_t)val;
11762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11763 rc = BAD_VALUE;
11764 }
11765 } else {
11766 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11767 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11768 }
11769 } else {
11770 LOGH("No flash mode in reprocess settings");
11771 }
11772
11773 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11774 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11776 rc = BAD_VALUE;
11777 }
11778 } else {
11779 LOGH("No flash state in reprocess settings");
11780 }
11781
11782 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11783 uint8_t *reprocessFlags =
11784 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11786 *reprocessFlags)) {
11787 rc = BAD_VALUE;
11788 }
11789 }
11790
Thierry Strudel54dc9782017-02-15 12:12:10 -080011791 // Add exif debug data to internal metadata
11792 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11793 mm_jpeg_debug_exif_params_t *debug_params =
11794 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11795 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11796 // AE
11797 if (debug_params->ae_debug_params_valid == TRUE) {
11798 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11799 debug_params->ae_debug_params);
11800 }
11801 // AWB
11802 if (debug_params->awb_debug_params_valid == TRUE) {
11803 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11804 debug_params->awb_debug_params);
11805 }
11806 // AF
11807 if (debug_params->af_debug_params_valid == TRUE) {
11808 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11809 debug_params->af_debug_params);
11810 }
11811 // ASD
11812 if (debug_params->asd_debug_params_valid == TRUE) {
11813 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11814 debug_params->asd_debug_params);
11815 }
11816 // Stats
11817 if (debug_params->stats_debug_params_valid == TRUE) {
11818 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11819 debug_params->stats_debug_params);
11820 }
11821 // BE Stats
11822 if (debug_params->bestats_debug_params_valid == TRUE) {
11823 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11824 debug_params->bestats_debug_params);
11825 }
11826 // BHIST
11827 if (debug_params->bhist_debug_params_valid == TRUE) {
11828 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11829 debug_params->bhist_debug_params);
11830 }
11831 // 3A Tuning
11832 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11833 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11834 debug_params->q3a_tuning_debug_params);
11835 }
11836 }
11837
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011838 // Add metadata which reprocess needs
11839 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11840 cam_reprocess_info_t *repro_info =
11841 (cam_reprocess_info_t *)frame_settings.find
11842 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011843 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011844 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011845 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011846 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011847 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011848 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011849 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011850 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011851 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011852 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011853 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011854 repro_info->pipeline_flip);
11855 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11856 repro_info->af_roi);
11857 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11858 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011859 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11860 CAM_INTF_PARM_ROTATION metadata has already been added in
11861 translateToHalMetadata and HAL needs to keep this new rotation
11862 metadata. Otherwise, the old rotation info saved in the vendor tag
11863 would be used */
11864 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11865 CAM_INTF_PARM_ROTATION, reprocParam) {
11866 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11867 } else {
11868 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011869 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011870 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011871 }
11872
11873 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11874 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11875 roi.width and roi.height would be the final JPEG size.
11876 For now, HAL only checks this for reprocess requests */
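/* Illustrative app-side usage (hypothetical values, not from the original
source): the reprocess request would carry
QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1,
QCAMERA3_JPEG_ENCODE_CROP_RECT = {left, top, width, height} for the crop,
and optionally QCAMERA3_JPEG_ENCODE_CROP_ROI = {0, 0, out_w, out_h}, where
out_w x out_h becomes the final JPEG size after scaling. */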
11877 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11878 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11879 uint8_t *enable =
11880 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11881 if (*enable == TRUE) {
11882 int32_t *crop_data =
11883 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11884 cam_stream_crop_info_t crop_meta;
11885 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11886 crop_meta.stream_id = 0;
11887 crop_meta.crop.left = crop_data[0];
11888 crop_meta.crop.top = crop_data[1];
11889 crop_meta.crop.width = crop_data[2];
11890 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011891 // The JPEG crop roi should match cpp output size
11892 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11893 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11894 crop_meta.roi_map.left = 0;
11895 crop_meta.roi_map.top = 0;
11896 crop_meta.roi_map.width = cpp_crop->crop.width;
11897 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011898 }
11899 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11900 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011901 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011902 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011903 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11904 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011905 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011906 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11907
11908 // Add JPEG scale information
11909 cam_dimension_t scale_dim;
11910 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11911 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11912 int32_t *roi =
11913 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11914 scale_dim.width = roi[2];
11915 scale_dim.height = roi[3];
11916 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11917 scale_dim);
11918 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11919 scale_dim.width, scale_dim.height, mCameraId);
11920 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011921 }
11922 }
11923
11924 return rc;
11925}
11926
11927/*===========================================================================
11928 * FUNCTION : saveRequestSettings
11929 *
11930 * DESCRIPTION: Add any settings that might have changed to the request settings
11931 * and save the settings to be applied on the frame
11932 *
11933 * PARAMETERS :
11934 * @jpegMetadata : the extracted and/or modified jpeg metadata
11935 * @request : request with initial settings
11936 *
11937 * RETURN :
11938 * camera_metadata_t* : pointer to the saved request settings
11939 *==========================================================================*/
11940camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11941 const CameraMetadata &jpegMetadata,
11942 camera3_capture_request_t *request)
11943{
11944 camera_metadata_t *resultMetadata;
11945 CameraMetadata camMetadata;
11946 camMetadata = request->settings;
11947
11948 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11949 int32_t thumbnail_size[2];
11950 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11951 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11952 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11953 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11954 }
11955
11956 if (request->input_buffer != NULL) {
11957 uint8_t reprocessFlags = 1;
11958 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11959 (uint8_t*)&reprocessFlags,
11960 sizeof(reprocessFlags));
11961 }
11962
11963 resultMetadata = camMetadata.release();
11964 return resultMetadata;
11965}
11966
11967/*===========================================================================
11968 * FUNCTION : setHalFpsRange
11969 *
11970 * DESCRIPTION: set FPS range parameter
11971 *
11972 *
11973 * PARAMETERS :
11974 * @settings : Metadata from framework
11975 * @hal_metadata: Metadata buffer
11976 *
11977 *
11978 * RETURN : success: NO_ERROR
11979 * failure:
11980 *==========================================================================*/
11981int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11982 metadata_buffer_t *hal_metadata)
11983{
11984 int32_t rc = NO_ERROR;
11985 cam_fps_range_t fps_range;
11986 fps_range.min_fps = (float)
11987 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11988 fps_range.max_fps = (float)
11989 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11990 fps_range.video_min_fps = fps_range.min_fps;
11991 fps_range.video_max_fps = fps_range.max_fps;
11992
11993 LOGD("aeTargetFpsRange fps: [%f %f]",
11994 fps_range.min_fps, fps_range.max_fps);
11995 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11996 * follows:
11997 * ---------------------------------------------------------------|
11998 * Video stream is absent in configure_streams |
11999 * (Camcorder preview before the first video record |
12000 * ---------------------------------------------------------------|
12001 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12002 * | | | vid_min/max_fps|
12003 * ---------------------------------------------------------------|
12004 * NO | [ 30, 240] | 240 | [240, 240] |
12005 * |-------------|-------------|----------------|
12006 * | [240, 240] | 240 | [240, 240] |
12007 * ---------------------------------------------------------------|
12008 * Video stream is present in configure_streams |
12009 * ---------------------------------------------------------------|
12010 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12011 * | | | vid_min/max_fps|
12012 * ---------------------------------------------------------------|
12013 * NO | [ 30, 240] | 240 | [240, 240] |
12014 * (camcorder prev |-------------|-------------|----------------|
12015 * after video rec | [240, 240] | 240 | [240, 240] |
12016 * is stopped) | | | |
12017 * ---------------------------------------------------------------|
12018 * YES | [ 30, 240] | 240 | [240, 240] |
12019 * |-------------|-------------|----------------|
12020 * | [240, 240] | 240 | [240, 240] |
12021 * ---------------------------------------------------------------|
12022 * When Video stream is absent in configure_streams,
12023 * preview fps = sensor_fps / batchsize
12024 * Eg: for 240fps at batchSize 4, preview = 60fps
12025 * for 120fps at batchSize 4, preview = 30fps
12026 *
12027 * When video stream is present in configure_streams, preview fps is as per
12028 * the ratio of preview buffers to video buffers requested in process
12029 * capture request
12030 */
12031 mBatchSize = 0;
12032 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12033 fps_range.min_fps = fps_range.video_max_fps;
12034 fps_range.video_min_fps = fps_range.video_max_fps;
12035 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12036 fps_range.max_fps);
12037 if (NAME_NOT_FOUND != val) {
12038 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12039 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12040 return BAD_VALUE;
12041 }
12042
12043 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12044 /* If batchmode is currently in progress and the fps changes,
12045 * set the flag to restart the sensor */
12046 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12047 (mHFRVideoFps != fps_range.max_fps)) {
12048 mNeedSensorRestart = true;
12049 }
12050 mHFRVideoFps = fps_range.max_fps;
12051 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
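// Illustrative example (assuming PREVIEW_FPS_FOR_HFR is 30 and
// MAX_HFR_BATCH_SIZE is 8): a [240, 240] request yields mBatchSize = 8 and a
// [120, 120] request yields mBatchSize = 4, matching the
// "preview fps = sensor_fps / batchsize" relation described above.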
12052 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12053 mBatchSize = MAX_HFR_BATCH_SIZE;
12054 }
12055 }
12056 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12057
12058 }
12059 } else {
12060 /* HFR mode is session param in backend/ISP. This should be reset when
12061 * in non-HFR mode */
12062 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12064 return BAD_VALUE;
12065 }
12066 }
12067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12068 return BAD_VALUE;
12069 }
12070 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12071 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12072 return rc;
12073}
12074
12075/*===========================================================================
12076 * FUNCTION : translateToHalMetadata
12077 *
12078 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12079 *
12080 *
12081 * PARAMETERS :
12082 * @request : request sent from framework
12083 *
12084 *
12085 * RETURN : success: NO_ERROR
12086 * failure:
12087 *==========================================================================*/
12088int QCamera3HardwareInterface::translateToHalMetadata
12089 (const camera3_capture_request_t *request,
12090 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012091 uint32_t snapshotStreamId) {
12092 if (request == nullptr || hal_metadata == nullptr) {
12093 return BAD_VALUE;
12094 }
12095
12096 int64_t minFrameDuration = getMinFrameDuration(request);
12097
12098 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12099 minFrameDuration);
12100}
12101
12102int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12103 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12104 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12105
Thierry Strudel3d639192016-09-09 11:52:26 -070012106 int rc = 0;
12107 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012108 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012109
12110 /* Do not change the order of the following list unless you know what you are
12111 * doing.
12112 * The order is laid out in such a way that parameters in the front of the table
12113 * may be used to override the parameters later in the table. Examples are:
12114 * 1. META_MODE should precede AEC/AWB/AF MODE
12115 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12116 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12117 * 4. Any mode should precede its corresponding settings
12118 */
12119 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12120 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12122 rc = BAD_VALUE;
12123 }
12124 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12125 if (rc != NO_ERROR) {
12126 LOGE("extractSceneMode failed");
12127 }
12128 }
12129
12130 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12131 uint8_t fwk_aeMode =
12132 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12133 uint8_t aeMode;
12134 int32_t redeye;
12135
12136 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12137 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012138 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12139 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012140 } else {
12141 aeMode = CAM_AE_MODE_ON;
12142 }
12143 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12144 redeye = 1;
12145 } else {
12146 redeye = 0;
12147 }
12148
12149 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12150 fwk_aeMode);
12151 if (NAME_NOT_FOUND != val) {
12152 int32_t flashMode = (int32_t)val;
12153 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12154 }
12155
12156 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12158 rc = BAD_VALUE;
12159 }
12160 }
12161
12162 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12163 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12164 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12165 fwk_whiteLevel);
12166 if (NAME_NOT_FOUND != val) {
12167 uint8_t whiteLevel = (uint8_t)val;
12168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12169 rc = BAD_VALUE;
12170 }
12171 }
12172 }
12173
12174 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12175 uint8_t fwk_cacMode =
12176 frame_settings.find(
12177 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12178 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12179 fwk_cacMode);
12180 if (NAME_NOT_FOUND != val) {
12181 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12182 bool entryAvailable = FALSE;
12183 // Check whether the framework-set CAC mode is supported on the device or not
12184 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12185 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12186 entryAvailable = TRUE;
12187 break;
12188 }
12189 }
12190 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12191 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12192 // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
12193 // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
12194 if (entryAvailable == FALSE) {
12195 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12196 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12197 } else {
12198 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12199 // High is not supported, so set FAST since the spec says the underlying
12200 // device implementation can be the same for both modes.
12201 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12202 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12203 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12204 // in order to avoid the fps drop due to high quality
12205 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12206 } else {
12207 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12208 }
12209 }
12210 }
12211 LOGD("Final cacMode is %d", cacMode);
12212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12213 rc = BAD_VALUE;
12214 }
12215 } else {
12216 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12217 }
12218 }
12219
Jason Lee84ae9972017-02-24 13:24:24 -080012220 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012221 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012222 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012223 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012224 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12225 fwk_focusMode);
12226 if (NAME_NOT_FOUND != val) {
12227 uint8_t focusMode = (uint8_t)val;
12228 LOGD("set focus mode %d", focusMode);
12229 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12230 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12231 rc = BAD_VALUE;
12232 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012233 }
12234 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012235 } else {
12236 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12237 LOGE("Focus forced to infinity %d", focusMode);
12238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12239 rc = BAD_VALUE;
12240 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012241 }
12242
Jason Lee84ae9972017-02-24 13:24:24 -080012243 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12244 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012245 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12247 focalDistance)) {
12248 rc = BAD_VALUE;
12249 }
12250 }
12251
12252 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12253 uint8_t fwk_antibandingMode =
12254 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12255 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12256 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12257 if (NAME_NOT_FOUND != val) {
12258 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012259 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12260 if (m60HzZone) {
12261 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12262 } else {
12263 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12264 }
12265 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12267 hal_antibandingMode)) {
12268 rc = BAD_VALUE;
12269 }
12270 }
12271 }
12272
12273 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12274 int32_t expCompensation = frame_settings.find(
12275 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12276 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12277 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12278 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12279 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012280 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12282 expCompensation)) {
12283 rc = BAD_VALUE;
12284 }
12285 }
12286
12287 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12288 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12294 rc = setHalFpsRange(frame_settings, hal_metadata);
12295 if (rc != NO_ERROR) {
12296 LOGE("setHalFpsRange failed");
12297 }
12298 }
12299
12300 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12301 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12303 rc = BAD_VALUE;
12304 }
12305 }
12306
12307 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12308 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12309 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12310 fwk_effectMode);
12311 if (NAME_NOT_FOUND != val) {
12312 uint8_t effectMode = (uint8_t)val;
12313 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12314 rc = BAD_VALUE;
12315 }
12316 }
12317 }
12318
12319 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12320 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12322 colorCorrectMode)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326
12327 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12328 cam_color_correct_gains_t colorCorrectGains;
12329 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12330 colorCorrectGains.gains[i] =
12331 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12332 }
12333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12334 colorCorrectGains)) {
12335 rc = BAD_VALUE;
12336 }
12337 }
12338
12339 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12340 cam_color_correct_matrix_t colorCorrectTransform;
12341 cam_rational_type_t transform_elem;
12342 size_t num = 0;
12343 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12344 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12345 transform_elem.numerator =
12346 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12347 transform_elem.denominator =
12348 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12349 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12350 num++;
12351 }
12352 }
12353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12354 colorCorrectTransform)) {
12355 rc = BAD_VALUE;
12356 }
12357 }
12358
12359 cam_trigger_t aecTrigger;
12360 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12361 aecTrigger.trigger_id = -1;
12362 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12363 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12364 aecTrigger.trigger =
12365 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12366 aecTrigger.trigger_id =
12367 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12369 aecTrigger)) {
12370 rc = BAD_VALUE;
12371 }
12372 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12373 aecTrigger.trigger, aecTrigger.trigger_id);
12374 }
12375
12376 /*af_trigger must come with a trigger id*/
12377 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12378 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12379 cam_trigger_t af_trigger;
12380 af_trigger.trigger =
12381 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12382 af_trigger.trigger_id =
12383 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12384 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12385 rc = BAD_VALUE;
12386 }
12387 LOGD("AfTrigger: %d AfTriggerID: %d",
12388 af_trigger.trigger, af_trigger.trigger_id);
12389 }
12390
12391 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12392 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12394 rc = BAD_VALUE;
12395 }
12396 }
12397 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12398 cam_edge_application_t edge_application;
12399 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012400
Thierry Strudel3d639192016-09-09 11:52:26 -070012401 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12402 edge_application.sharpness = 0;
12403 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012404 edge_application.sharpness =
12405 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12406 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12407 int32_t sharpness =
12408 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12409 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12410 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12411 LOGD("Setting edge mode sharpness %d", sharpness);
12412 edge_application.sharpness = sharpness;
12413 }
12414 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012415 }
12416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12417 rc = BAD_VALUE;
12418 }
12419 }
12420
12421 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12422 int32_t respectFlashMode = 1;
12423 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12424 uint8_t fwk_aeMode =
12425 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012426 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12427 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12428 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012429 respectFlashMode = 0;
12430 LOGH("AE Mode controls flash, ignore android.flash.mode");
12431 }
12432 }
12433 if (respectFlashMode) {
12434 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12435 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12436 LOGH("flash mode after mapping %d", val);
12437 // To check: CAM_INTF_META_FLASH_MODE usage
12438 if (NAME_NOT_FOUND != val) {
12439 uint8_t flashMode = (uint8_t)val;
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12441 rc = BAD_VALUE;
12442 }
12443 }
12444 }
12445 }
12446
12447 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12448 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453
12454 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12455 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12457 flashFiringTime)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461
12462 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12463 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12465 hotPixelMode)) {
12466 rc = BAD_VALUE;
12467 }
12468 }
12469
12470 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12471 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12473 lensAperture)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12479 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12481 filterDensity)) {
12482 rc = BAD_VALUE;
12483 }
12484 }
12485
12486 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12487 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12488 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12489 focalLength)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12495 uint8_t optStabMode =
12496 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12497 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12498 optStabMode)) {
12499 rc = BAD_VALUE;
12500 }
12501 }
12502
12503 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12504 uint8_t videoStabMode =
12505 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12506 LOGD("videoStabMode from APP = %d", videoStabMode);
12507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12508 videoStabMode)) {
12509 rc = BAD_VALUE;
12510 }
12511 }
12512
12513
12514 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12515 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12516 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12517 noiseRedMode)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521
12522 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12523 float reprocessEffectiveExposureFactor =
12524 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12525 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12526 reprocessEffectiveExposureFactor)) {
12527 rc = BAD_VALUE;
12528 }
12529 }
12530
12531 cam_crop_region_t scalerCropRegion;
12532 bool scalerCropSet = false;
12533 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12534 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12535 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12536 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12537 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12538
12539 // Map coordinate system from active array to sensor output.
12540 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12541 scalerCropRegion.width, scalerCropRegion.height);
12542
12543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12544 scalerCropRegion)) {
12545 rc = BAD_VALUE;
12546 }
12547 scalerCropSet = true;
12548 }
12549
12550 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12551 int64_t sensorExpTime =
12552 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12553 LOGD("setting sensorExpTime %lld", sensorExpTime);
12554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12555 sensorExpTime)) {
12556 rc = BAD_VALUE;
12557 }
12558 }
12559
12560 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12561 int64_t sensorFrameDuration =
12562 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012563 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12564 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12565 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12566 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12568 sensorFrameDuration)) {
12569 rc = BAD_VALUE;
12570 }
12571 }
12572
12573 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12574 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12575 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12576 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12577 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12578 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12579 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12581 sensorSensitivity)) {
12582 rc = BAD_VALUE;
12583 }
12584 }
12585
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012586#ifndef USE_HAL_3_3
12587 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12588 int32_t ispSensitivity =
12589 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12590 if (ispSensitivity <
12591 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12592 ispSensitivity =
12593 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12594 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12595 }
12596 if (ispSensitivity >
12597 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12598 ispSensitivity =
12599 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12600 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12601 }
12602 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12603 ispSensitivity)) {
12604 rc = BAD_VALUE;
12605 }
12606 }
12607#endif
12608
Thierry Strudel3d639192016-09-09 11:52:26 -070012609 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12610 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12612 rc = BAD_VALUE;
12613 }
12614 }
12615
12616 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12617 uint8_t fwk_facedetectMode =
12618 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12619
12620 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12621 fwk_facedetectMode);
12622
12623 if (NAME_NOT_FOUND != val) {
12624 uint8_t facedetectMode = (uint8_t)val;
12625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12626 facedetectMode)) {
12627 rc = BAD_VALUE;
12628 }
12629 }
12630 }
12631
Thierry Strudel54dc9782017-02-15 12:12:10 -080012632 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012633 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012634 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12636 histogramMode)) {
12637 rc = BAD_VALUE;
12638 }
12639 }
12640
12641 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12642 uint8_t sharpnessMapMode =
12643 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12645 sharpnessMapMode)) {
12646 rc = BAD_VALUE;
12647 }
12648 }
12649
12650 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12651 uint8_t tonemapMode =
12652 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12654 rc = BAD_VALUE;
12655 }
12656 }
12657 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12658 /*All tonemap channels will have the same number of points*/
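/* Illustrative layout (assumed values): a two-point linear curve per channel
arrives as {Pin0, Pout0, Pin1, Pout1} = {0.0, 0.0, 1.0, 1.0}, giving
tonemap_points_cnt = count / 2 = 2, with tonemap_points[i][0] holding the
input and tonemap_points[i][1] the output of point i. */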
12659 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12660 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12661 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12662 cam_rgb_tonemap_curves tonemapCurves;
12663 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12664 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12665 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12666 tonemapCurves.tonemap_points_cnt,
12667 CAM_MAX_TONEMAP_CURVE_SIZE);
12668 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12669 }
12670
12671 /* ch0 = G*/
12672 size_t point = 0;
12673 cam_tonemap_curve_t tonemapCurveGreen;
12674 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12675 for (size_t j = 0; j < 2; j++) {
12676 tonemapCurveGreen.tonemap_points[i][j] =
12677 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12678 point++;
12679 }
12680 }
12681 tonemapCurves.curves[0] = tonemapCurveGreen;
12682
12683 /* ch 1 = B */
12684 point = 0;
12685 cam_tonemap_curve_t tonemapCurveBlue;
12686 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12687 for (size_t j = 0; j < 2; j++) {
12688 tonemapCurveBlue.tonemap_points[i][j] =
12689 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12690 point++;
12691 }
12692 }
12693 tonemapCurves.curves[1] = tonemapCurveBlue;
12694
12695 /* ch 2 = R */
12696 point = 0;
12697 cam_tonemap_curve_t tonemapCurveRed;
12698 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12699 for (size_t j = 0; j < 2; j++) {
12700 tonemapCurveRed.tonemap_points[i][j] =
12701 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12702 point++;
12703 }
12704 }
12705 tonemapCurves.curves[2] = tonemapCurveRed;
12706
12707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12708 tonemapCurves)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
12713 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12714 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12716 captureIntent)) {
12717 rc = BAD_VALUE;
12718 }
12719 }
12720
12721 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12722 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12724 blackLevelLock)) {
12725 rc = BAD_VALUE;
12726 }
12727 }
12728
12729 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12730 uint8_t lensShadingMapMode =
12731 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12733 lensShadingMapMode)) {
12734 rc = BAD_VALUE;
12735 }
12736 }
12737
12738 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12739 cam_area_t roi;
12740 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012741 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012742
12743 // Map coordinate system from active array to sensor output.
12744 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12745 roi.rect.height);
12746
12747 if (scalerCropSet) {
12748 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12749 }
12750 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12751 rc = BAD_VALUE;
12752 }
12753 }
12754
12755 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12756 cam_area_t roi;
12757 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012758 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012759
12760 // Map coordinate system from active array to sensor output.
12761 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12762 roi.rect.height);
12763
12764 if (scalerCropSet) {
12765 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12766 }
12767 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12768 rc = BAD_VALUE;
12769 }
12770 }
12771
12772 // CDS for non-HFR non-video mode
12773 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12774 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12775 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12776 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12777 LOGE("Invalid CDS mode %d!", *fwk_cds);
12778 } else {
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12780 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784 }
12785
Thierry Strudel04e026f2016-10-10 11:27:36 -070012786 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012787 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012788 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012789 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12790 }
12791 if (m_bVideoHdrEnabled)
12792 vhdr = CAM_VIDEO_HDR_MODE_ON;
12793
Thierry Strudel54dc9782017-02-15 12:12:10 -080012794 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12795
12796 if(vhdr != curr_hdr_state)
12797 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12798
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012799 rc = setVideoHdrMode(mParameters, vhdr);
12800 if (rc != NO_ERROR) {
12801 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012802 }
12803
12804 //IR
12805 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12806 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12807 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012808 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12809 uint8_t isIRon = 0;
12810
12811 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012812 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12813 LOGE("Invalid IR mode %d!", fwk_ir);
12814 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012815 if(isIRon != curr_ir_state )
12816 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12817
Thierry Strudel04e026f2016-10-10 11:27:36 -070012818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12819 CAM_INTF_META_IR_MODE, fwk_ir)) {
12820 rc = BAD_VALUE;
12821 }
12822 }
12823 }
12824
Thierry Strudel54dc9782017-02-15 12:12:10 -080012825 //Binning Correction Mode
12826 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12827 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12828 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12829 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12830 || (0 > fwk_binning_correction)) {
12831 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12832 } else {
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12834 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838 }
12839
Thierry Strudel269c81a2016-10-12 12:13:59 -070012840 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12841 float aec_speed;
12842 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12843 LOGD("AEC Speed :%f", aec_speed);
12844 if ( aec_speed < 0 ) {
12845 LOGE("Invalid AEC mode %f!", aec_speed);
12846 } else {
12847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12848 aec_speed)) {
12849 rc = BAD_VALUE;
12850 }
12851 }
12852 }
12853
12854 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12855 float awb_speed;
12856 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12857 LOGD("AWB Speed :%f", awb_speed);
12858 if ( awb_speed < 0 ) {
12859 LOGE("Invalid AWB mode %f!", awb_speed);
12860 } else {
12861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12862 awb_speed)) {
12863 rc = BAD_VALUE;
12864 }
12865 }
12866 }
12867
Thierry Strudel3d639192016-09-09 11:52:26 -070012868 // TNR
12869 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12870 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12871 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012872 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012873 cam_denoise_param_t tnr;
12874 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12875 tnr.process_plates =
12876 (cam_denoise_process_type_t)frame_settings.find(
12877 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12878 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012879
12880 if(b_TnrRequested != curr_tnr_state)
12881 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12882
Thierry Strudel3d639192016-09-09 11:52:26 -070012883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12884 rc = BAD_VALUE;
12885 }
12886 }
12887
Thierry Strudel54dc9782017-02-15 12:12:10 -080012888 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012889 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012890 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12892 *exposure_metering_mode)) {
12893 rc = BAD_VALUE;
12894 }
12895 }
12896
Thierry Strudel3d639192016-09-09 11:52:26 -070012897 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12898 int32_t fwk_testPatternMode =
12899 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12900 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12901 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12902
12903 if (NAME_NOT_FOUND != testPatternMode) {
12904 cam_test_pattern_data_t testPatternData;
12905 memset(&testPatternData, 0, sizeof(testPatternData));
12906 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12907 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12908 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12909 int32_t *fwk_testPatternData =
12910 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12911 testPatternData.r = fwk_testPatternData[0];
12912 testPatternData.b = fwk_testPatternData[3];
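 // The framework supplies the solid color as four values [R, G1, G2, B]; which
 // green sample lands on the Gr vs Gb sites depends on the sensor CFA, hence
 // the switch on color_arrangement below.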
12913 switch (gCamCapability[mCameraId]->color_arrangement) {
12914 case CAM_FILTER_ARRANGEMENT_RGGB:
12915 case CAM_FILTER_ARRANGEMENT_GRBG:
12916 testPatternData.gr = fwk_testPatternData[1];
12917 testPatternData.gb = fwk_testPatternData[2];
12918 break;
12919 case CAM_FILTER_ARRANGEMENT_GBRG:
12920 case CAM_FILTER_ARRANGEMENT_BGGR:
12921 testPatternData.gr = fwk_testPatternData[2];
12922 testPatternData.gb = fwk_testPatternData[1];
12923 break;
12924 default:
12925 LOGE("color arrangement %d is not supported",
12926 gCamCapability[mCameraId]->color_arrangement);
12927 break;
12928 }
12929 }
12930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12931 testPatternData)) {
12932 rc = BAD_VALUE;
12933 }
12934 } else {
12935 LOGE("Invalid framework sensor test pattern mode %d",
12936 fwk_testPatternMode);
12937 }
12938 }
12939
12940 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12941 size_t count = 0;
12942 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12943 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12944 gps_coords.data.d, gps_coords.count, count);
12945 if (gps_coords.count != count) {
12946 rc = BAD_VALUE;
12947 }
12948 }
12949
12950 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12951 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12952 size_t count = 0;
12953 const char *gps_methods_src = (const char *)
12954 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12955 memset(gps_methods, '\0', sizeof(gps_methods));
12956 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12957 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12958 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12959 if (GPS_PROCESSING_METHOD_SIZE != count) {
12960 rc = BAD_VALUE;
12961 }
12962 }
12963
12964 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12965 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12967 gps_timestamp)) {
12968 rc = BAD_VALUE;
12969 }
12970 }
12971
12972 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12973 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12974 cam_rotation_info_t rotation_info;
12975 if (orientation == 0) {
12976 rotation_info.rotation = ROTATE_0;
12977 } else if (orientation == 90) {
12978 rotation_info.rotation = ROTATE_90;
12979 } else if (orientation == 180) {
12980 rotation_info.rotation = ROTATE_180;
12981 } else if (orientation == 270) {
12982 rotation_info.rotation = ROTATE_270;
12983 }
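 // ANDROID_JPEG_ORIENTATION is specified as a multiple of 90 degrees, so one of
 // the branches above is expected to match.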
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012984 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012985 rotation_info.streamId = snapshotStreamId;
12986 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12988 rc = BAD_VALUE;
12989 }
12990 }
12991
12992 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12993 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12995 rc = BAD_VALUE;
12996 }
12997 }
12998
12999 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13000 uint32_t thumb_quality = (uint32_t)
13001 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13003 thumb_quality)) {
13004 rc = BAD_VALUE;
13005 }
13006 }
13007
13008 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13009 cam_dimension_t dim;
13010 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13011 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13012 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13013 rc = BAD_VALUE;
13014 }
13015 }
13016
13017 // Internal metadata
13018 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13019 size_t count = 0;
13020 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13021 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13022 privatedata.data.i32, privatedata.count, count);
13023 if (privatedata.count != count) {
13024 rc = BAD_VALUE;
13025 }
13026 }
13027
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013028 // ISO/Exposure Priority
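 // QCAMERA3_SELECT_PRIORITY chooses which quantity is manual; QCAMERA3_USE_ISO_EXP_PRIORITY
 // then carries either the ISO value or the exposure time. ZSL is enabled while a manual
 // priority is applied and disabled when no priority keys are present (else branch below).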
13029 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13030 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13031 cam_priority_mode_t mode =
13032 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13033 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13034 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13035 use_iso_exp_pty.previewOnly = FALSE;
13036 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13037 use_iso_exp_pty.value = *ptr;
13038
13039 if(CAM_ISO_PRIORITY == mode) {
13040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13041 use_iso_exp_pty)) {
13042 rc = BAD_VALUE;
13043 }
13044 }
13045 else {
13046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13047 use_iso_exp_pty)) {
13048 rc = BAD_VALUE;
13049 }
13050 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013051
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13053 rc = BAD_VALUE;
13054 }
13055 }
13056 } else {
13057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13058 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013059 }
13060 }
13061
13062 // Saturation
13063 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13064 int32_t* use_saturation =
13065 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13067 rc = BAD_VALUE;
13068 }
13069 }
13070
Thierry Strudel3d639192016-09-09 11:52:26 -070013071 // EV step
13072 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13073 gCamCapability[mCameraId]->exp_compensation_step)) {
13074 rc = BAD_VALUE;
13075 }
13076
13077 // CDS info
13078 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13079 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13080 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13081
13082 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13083 CAM_INTF_META_CDS_DATA, *cdsData)) {
13084 rc = BAD_VALUE;
13085 }
13086 }
13087
Shuzhen Wang19463d72016-03-08 11:09:52 -080013088 // Hybrid AE
13089 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13090 uint8_t *hybrid_ae = (uint8_t *)
13091 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13092
13093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13094 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13095 rc = BAD_VALUE;
13096 }
13097 }
13098
Shuzhen Wang14415f52016-11-16 18:26:18 -080013099 // Histogram
13100 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13101 uint8_t histogramMode =
13102 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13104 histogramMode)) {
13105 rc = BAD_VALUE;
13106 }
13107 }
13108
13109 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13110 int32_t histogramBins =
13111 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13113 histogramBins)) {
13114 rc = BAD_VALUE;
13115 }
13116 }
13117
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013118 // Tracking AF
13119 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13120 uint8_t trackingAfTrigger =
13121 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13123 trackingAfTrigger)) {
13124 rc = BAD_VALUE;
13125 }
13126 }
13127
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013128 // Makernote
13129 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13130 if (entry.count != 0) {
13131 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13132 cam_makernote_t makernote;
13133 makernote.length = entry.count;
13134 memcpy(makernote.data, entry.data.u8, makernote.length);
13135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13136 rc = BAD_VALUE;
13137 }
13138 } else {
13139 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13140 MAX_MAKERNOTE_LENGTH);
13141 rc = BAD_VALUE;
13142 }
13143 }
13144
Thierry Strudel3d639192016-09-09 11:52:26 -070013145 return rc;
13146}
13147
13148/*===========================================================================
13149 * FUNCTION : captureResultCb
13150 *
13151 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13152 *
13153 * PARAMETERS :
13154 * @frame : frame information from mm-camera-interface
13155 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13156 * @userdata: userdata
13157 *
13158 * RETURN : NONE
13159 *==========================================================================*/
13160void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13161 camera3_stream_buffer_t *buffer,
13162 uint32_t frame_number, bool isInputBuffer, void *userdata)
13163{
13164 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13165 if (hw == NULL) {
13166 LOGE("Invalid hw %p", hw);
13167 return;
13168 }
13169
13170 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13171 return;
13172}
13173
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013174/*===========================================================================
13175 * FUNCTION : setBufferErrorStatus
13176 *
13177 * DESCRIPTION: Callback handler for channels to report any buffer errors
13178 *
13179 * PARAMETERS :
13180 * @ch : Channel on which buffer error is reported from
13181 * @frame_number : frame number on which buffer error is reported on
13182 * @buffer_status : buffer error status
13183 * @userdata: userdata
13184 *
13185 * RETURN : NONE
13186 *==========================================================================*/
13187void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13188 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13189{
13190 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13191 if (hw == NULL) {
13192 LOGE("Invalid hw %p", hw);
13193 return;
13194 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013195
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013196 hw->setBufferErrorStatus(ch, frame_number, err);
13197 return;
13198}
13199
13200void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13201 uint32_t frameNumber, camera3_buffer_status_t err)
13202{
13203 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13204 pthread_mutex_lock(&mMutex);
13205
13206 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13207 if (req.frame_number != frameNumber)
13208 continue;
13209 for (auto& k : req.mPendingBufferList) {
13210 if(k.stream->priv == ch) {
13211 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13212 }
13213 }
13214 }
13215
13216 pthread_mutex_unlock(&mMutex);
13217 return;
13218}
Thierry Strudel3d639192016-09-09 11:52:26 -070013219/*===========================================================================
13220 * FUNCTION : initialize
13221 *
13222 * DESCRIPTION: Pass framework callback pointers to HAL
13223 *
13224 * PARAMETERS :
13225 *
13226 *
13227 * RETURN : Success : 0
13228 * Failure: -ENODEV
13229 *==========================================================================*/
13230
13231int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13232 const camera3_callback_ops_t *callback_ops)
13233{
13234 LOGD("E");
13235 QCamera3HardwareInterface *hw =
13236 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13237 if (!hw) {
13238 LOGE("NULL camera device");
13239 return -ENODEV;
13240 }
13241
13242 int rc = hw->initialize(callback_ops);
13243 LOGD("X");
13244 return rc;
13245}
13246
13247/*===========================================================================
13248 * FUNCTION : configure_streams
13249 *
13250 * DESCRIPTION: Static wrapper; validates the device handle and forwards the stream list to configureStreams()
13251 *
13252 * PARAMETERS :
13253 *
13254 *
13255 * RETURN : Success: 0
13256 * Failure: -EINVAL (if stream configuration is invalid)
13257 * -ENODEV (fatal error)
13258 *==========================================================================*/
13259
13260int QCamera3HardwareInterface::configure_streams(
13261 const struct camera3_device *device,
13262 camera3_stream_configuration_t *stream_list)
13263{
13264 LOGD("E");
13265 QCamera3HardwareInterface *hw =
13266 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13267 if (!hw) {
13268 LOGE("NULL camera device");
13269 return -ENODEV;
13270 }
13271 int rc = hw->configureStreams(stream_list);
13272 LOGD("X");
13273 return rc;
13274}
13275
13276/*===========================================================================
13277 * FUNCTION : construct_default_request_settings
13278 *
13279 * DESCRIPTION: Configure a settings buffer to meet the required use case
13280 *
13281 * PARAMETERS :
13282 *
13283 *
13284 * RETURN : Success: Return valid metadata
13285 * Failure: Return NULL
13286 *==========================================================================*/
13287const camera_metadata_t* QCamera3HardwareInterface::
13288 construct_default_request_settings(const struct camera3_device *device,
13289 int type)
13290{
13291
13292 LOGD("E");
13293 camera_metadata_t* fwk_metadata = NULL;
13294 QCamera3HardwareInterface *hw =
13295 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13296 if (!hw) {
13297 LOGE("NULL camera device");
13298 return NULL;
13299 }
13300
13301 fwk_metadata = hw->translateCapabilityToMetadata(type);
13302
13303 LOGD("X");
13304 return fwk_metadata;
13305}
13306
13307/*===========================================================================
13308 * FUNCTION : process_capture_request
13309 *
13310 * DESCRIPTION: Static wrapper; forwards the capture request to orchestrateRequest()
13311 *
13312 * PARAMETERS :
13313 *
13314 *
13315 * RETURN :
13316 *==========================================================================*/
13317int QCamera3HardwareInterface::process_capture_request(
13318 const struct camera3_device *device,
13319 camera3_capture_request_t *request)
13320{
13321 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013322 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013323 QCamera3HardwareInterface *hw =
13324 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13325 if (!hw) {
13326 LOGE("NULL camera device");
13327 return -EINVAL;
13328 }
13329
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013330 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013331 LOGD("X");
13332 return rc;
13333}
13334
13335/*===========================================================================
13336 * FUNCTION : dump
13337 *
13338 * DESCRIPTION: Dump HAL state to the given file descriptor; also refreshes the log level
13339 *
13340 * PARAMETERS :
13341 *
13342 *
13343 * RETURN :
13344 *==========================================================================*/
13345
13346void QCamera3HardwareInterface::dump(
13347 const struct camera3_device *device, int fd)
13348{
13349 /* Log level property is read when "adb shell dumpsys media.camera" is
13350 called so that the log level can be controlled without restarting
13351 the media server */
13352 getLogLevel();
13353
13354 LOGD("E");
13355 QCamera3HardwareInterface *hw =
13356 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13357 if (!hw) {
13358 LOGE("NULL camera device");
13359 return;
13360 }
13361
13362 hw->dump(fd);
13363 LOGD("X");
13364 return;
13365}
13366
13367/*===========================================================================
13368 * FUNCTION : flush
13369 *
13370 * DESCRIPTION: Flush all in-flight requests; only performed when the device is in STARTED state
13371 *
13372 * PARAMETERS :
13373 *
13374 *
13375 * RETURN :
13376 *==========================================================================*/
13377
13378int QCamera3HardwareInterface::flush(
13379 const struct camera3_device *device)
13380{
13381 int rc;
13382 LOGD("E");
13383 QCamera3HardwareInterface *hw =
13384 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13385 if (!hw) {
13386 LOGE("NULL camera device");
13387 return -EINVAL;
13388 }
13389
13390 pthread_mutex_lock(&hw->mMutex);
13391 // Validate current state
13392 switch (hw->mState) {
13393 case STARTED:
13394 /* valid state */
13395 break;
13396
13397 case ERROR:
13398 pthread_mutex_unlock(&hw->mMutex);
13399 hw->handleCameraDeviceError();
13400 return -ENODEV;
13401
13402 default:
13403 LOGI("Flush returned during state %d", hw->mState);
13404 pthread_mutex_unlock(&hw->mMutex);
13405 return 0;
13406 }
13407 pthread_mutex_unlock(&hw->mMutex);
13408
13409 rc = hw->flush(true /* restart channels */ );
13410 LOGD("X");
13411 return rc;
13412}
13413
13414/*===========================================================================
13415 * FUNCTION : close_camera_device
13416 *
13417 * DESCRIPTION: Close the camera device and destroy the HAL instance
13418 *
13419 * PARAMETERS :
13420 *
13421 *
13422 * RETURN :
13423 *==========================================================================*/
13424int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13425{
13426 int ret = NO_ERROR;
13427 QCamera3HardwareInterface *hw =
13428 reinterpret_cast<QCamera3HardwareInterface *>(
13429 reinterpret_cast<camera3_device_t *>(device)->priv);
13430 if (!hw) {
13431 LOGE("NULL camera device");
13432 return BAD_VALUE;
13433 }
13434
13435 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13436 delete hw;
13437 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013438 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013439 return ret;
13440}
13441
13442/*===========================================================================
13443 * FUNCTION : getWaveletDenoiseProcessPlate
13444 *
13445 * DESCRIPTION: query wavelet denoise process plate
13446 *
13447 * PARAMETERS : None
13448 *
13449 * RETURN : WNR process plate value
13450 *==========================================================================*/
13451cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13452{
13453 char prop[PROPERTY_VALUE_MAX];
13454 memset(prop, 0, sizeof(prop));
13455 property_get("persist.denoise.process.plates", prop, "0");
13456 int processPlate = atoi(prop);
13457 switch(processPlate) {
13458 case 0:
13459 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13460 case 1:
13461 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13462 case 2:
13463 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13464 case 3:
13465 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13466 default:
13467 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13468 }
13469}
13470
13471
13472/*===========================================================================
13473 * FUNCTION : getTemporalDenoiseProcessPlate
13474 *
13475 * DESCRIPTION: query temporal denoise process plate
13476 *
13477 * PARAMETERS : None
13478 *
13479 * RETURN : TNR process plate value
13480 *==========================================================================*/
13481cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13482{
13483 char prop[PROPERTY_VALUE_MAX];
13484 memset(prop, 0, sizeof(prop));
13485 property_get("persist.tnr.process.plates", prop, "0");
13486 int processPlate = atoi(prop);
13487 switch(processPlate) {
13488 case 0:
13489 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13490 case 1:
13491 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13492 case 2:
13493 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13494 case 3:
13495 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13496 default:
13497 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13498 }
13499}
13500
13501
13502/*===========================================================================
13503 * FUNCTION : extractSceneMode
13504 *
13505 * DESCRIPTION: Extract scene mode from frameworks set metadata
13506 *
13507 * PARAMETERS :
13508 * @frame_settings: CameraMetadata reference
13509 * @metaMode: ANDROID_CONTROL_MODE
13510 * @hal_metadata: hal metadata structure
13511 *
13512 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13513 *==========================================================================*/
13514int32_t QCamera3HardwareInterface::extractSceneMode(
13515 const CameraMetadata &frame_settings, uint8_t metaMode,
13516 metadata_buffer_t *hal_metadata)
13517{
13518 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013519 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13520
13521 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13522 LOGD("Ignoring control mode OFF_KEEP_STATE");
13523 return NO_ERROR;
13524 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013525
13526 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13527 camera_metadata_ro_entry entry =
13528 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13529 if (0 == entry.count)
13530 return rc;
13531
13532 uint8_t fwk_sceneMode = entry.data.u8[0];
13533
13534 int val = lookupHalName(SCENE_MODES_MAP,
13535 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13536 fwk_sceneMode);
13537 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013538 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013539 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013540 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013541 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013542
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013543 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13544 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13545 }
13546
13547 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13548 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013549 cam_hdr_param_t hdr_params;
13550 hdr_params.hdr_enable = 1;
13551 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13552 hdr_params.hdr_need_1x = false;
13553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13554 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13555 rc = BAD_VALUE;
13556 }
13557 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013558
Thierry Strudel3d639192016-09-09 11:52:26 -070013559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13560 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13561 rc = BAD_VALUE;
13562 }
13563 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013564
13565 if (mForceHdrSnapshot) {
13566 cam_hdr_param_t hdr_params;
13567 hdr_params.hdr_enable = 1;
13568 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13569 hdr_params.hdr_need_1x = false;
13570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13571 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13572 rc = BAD_VALUE;
13573 }
13574 }
13575
Thierry Strudel3d639192016-09-09 11:52:26 -070013576 return rc;
13577}
13578
13579/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013580 * FUNCTION : setVideoHdrMode
13581 *
13582 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13583 *
13584 * PARAMETERS :
13585 * @hal_metadata: hal metadata structure
13586 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13587 *
13588 * RETURN : NO_ERROR on success, BAD_VALUE on invalid mode
13589 *==========================================================================*/
13590int32_t QCamera3HardwareInterface::setVideoHdrMode(
13591 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13592{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013593 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13594 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13595 }
13596
13597 LOGE("Invalid Video HDR mode %d!", vhdr);
13598 return BAD_VALUE;
13599}
13600
13601/*===========================================================================
13602 * FUNCTION : setSensorHDR
13603 *
13604 * DESCRIPTION: Enable/disable sensor HDR.
13605 *
13606 * PARAMETERS :
13607 * @hal_metadata: hal metadata structure
13608 * @enable: boolean whether to enable/disable sensor HDR
13609 *
13610 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13611 *==========================================================================*/
13612int32_t QCamera3HardwareInterface::setSensorHDR(
13613 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13614{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013615 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013616 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13617
13618 if (enable) {
13619 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13620 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13621 #ifdef _LE_CAMERA_
13622 //Default to staggered HDR for IOT
13623 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13624 #else
13625 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13626 #endif
13627 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13628 }
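 // The property value is cast directly to cam_sensor_hdr_type_t; per the IOT
 // default above, a value of "3" selects staggered HDR.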
13629
13630 bool isSupported = false;
13631 switch (sensor_hdr) {
13632 case CAM_SENSOR_HDR_IN_SENSOR:
13633 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13634 CAM_QCOM_FEATURE_SENSOR_HDR) {
13635 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013636 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013637 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013638 break;
13639 case CAM_SENSOR_HDR_ZIGZAG:
13640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13641 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13642 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013643 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013644 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013645 break;
13646 case CAM_SENSOR_HDR_STAGGERED:
13647 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13648 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13649 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013650 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013651 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013652 break;
13653 case CAM_SENSOR_HDR_OFF:
13654 isSupported = true;
13655 LOGD("Turning off sensor HDR");
13656 break;
13657 default:
13658 LOGE("HDR mode %d not supported", sensor_hdr);
13659 rc = BAD_VALUE;
13660 break;
13661 }
13662
13663 if(isSupported) {
13664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13665 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13666 rc = BAD_VALUE;
13667 } else {
13668 if(!isVideoHdrEnable)
13669 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013670 }
13671 }
13672 return rc;
13673}
13674
13675/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013676 * FUNCTION : needRotationReprocess
13677 *
13678 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13679 *
13680 * PARAMETERS : none
13681 *
13682 * RETURN : true: needed
13683 * false: no need
13684 *==========================================================================*/
13685bool QCamera3HardwareInterface::needRotationReprocess()
13686{
13687 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13688 // pp has the capability to process rotation, so rotation is handled via reprocess
13689 LOGH("need do reprocess for rotation");
13690 return true;
13691 }
13692
13693 return false;
13694}
13695
13696/*===========================================================================
13697 * FUNCTION : needReprocess
13698 *
13699 * DESCRIPTION: if reprocess is needed
13700 *
13701 * PARAMETERS : none
13702 *
13703 * RETURN : true: needed
13704 * false: no need
13705 *==========================================================================*/
13706bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13707{
13708 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13709 // TODO: add for ZSL HDR later
13710 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13711 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13712 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13713 return true;
13714 } else {
13715 LOGH("already post processed frame");
13716 return false;
13717 }
13718 }
13719 return needRotationReprocess();
13720}
13721
13722/*===========================================================================
13723 * FUNCTION : needJpegExifRotation
13724 *
13725 * DESCRIPTION: if rotation from jpeg is needed
13726 *
13727 * PARAMETERS : none
13728 *
13729 * RETURN : true: needed
13730 * false: no need
13731 *==========================================================================*/
13732bool QCamera3HardwareInterface::needJpegExifRotation()
13733{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013734 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013735 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13736 LOGD("Need use Jpeg EXIF Rotation");
13737 return true;
13738 }
13739 return false;
13740}
13741
13742/*===========================================================================
13743 * FUNCTION : addOfflineReprocChannel
13744 *
13745 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13746 * coming from input channel
13747 *
13748 * PARAMETERS :
13749 * @config : reprocess configuration
13750 * @inputChHandle : pointer to the input (source) channel
13751 *
13752 *
13753 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13754 *==========================================================================*/
13755QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13756 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13757{
13758 int32_t rc = NO_ERROR;
13759 QCamera3ReprocessChannel *pChannel = NULL;
13760
13761 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013762 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13763 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013764 if (NULL == pChannel) {
13765 LOGE("no mem for reprocess channel");
13766 return NULL;
13767 }
13768
13769 rc = pChannel->initialize(IS_TYPE_NONE);
13770 if (rc != NO_ERROR) {
13771 LOGE("init reprocess channel failed, ret = %d", rc);
13772 delete pChannel;
13773 return NULL;
13774 }
13775
13776 // pp feature config
13777 cam_pp_feature_config_t pp_config;
13778 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13779
13780 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13781 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13782 & CAM_QCOM_FEATURE_DSDN) {
13783 // Prefer CPP DSDN over CDS in case the hardware supports it.
13784 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13785 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13786 }
13787 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13788 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13789 }
13790
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013791 if (config.hdr_param.hdr_enable) {
13792 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13793 pp_config.hdr_param = config.hdr_param;
13794 }
13795
13796 if (mForceHdrSnapshot) {
13797 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13798 pp_config.hdr_param.hdr_enable = 1;
13799 pp_config.hdr_param.hdr_need_1x = 0;
13800 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13801 }
13802
Thierry Strudel3d639192016-09-09 11:52:26 -070013803 rc = pChannel->addReprocStreamsFromSource(pp_config,
13804 config,
13805 IS_TYPE_NONE,
13806 mMetadataChannel);
13807
13808 if (rc != NO_ERROR) {
13809 delete pChannel;
13810 return NULL;
13811 }
13812 return pChannel;
13813}
13814
13815/*===========================================================================
13816 * FUNCTION : getMobicatMask
13817 *
13818 * DESCRIPTION: returns mobicat mask
13819 *
13820 * PARAMETERS : none
13821 *
13822 * RETURN : mobicat mask
13823 *
13824 *==========================================================================*/
13825uint8_t QCamera3HardwareInterface::getMobicatMask()
13826{
13827 return m_MobicatMask;
13828}
13829
13830/*===========================================================================
13831 * FUNCTION : setMobicat
13832 *
13833 * DESCRIPTION: set Mobicat on/off.
13834 *
13835 * PARAMETERS :
13836 * @params : none
13837 *
13838 * RETURN : int32_t type of status
13839 * NO_ERROR -- success
13840 * none-zero failure code
13841 *==========================================================================*/
13842int32_t QCamera3HardwareInterface::setMobicat()
13843{
Thierry Strudel3d639192016-09-09 11:52:26 -070013844 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013845
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013846 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013847 tune_cmd_t tune_cmd;
13848 tune_cmd.type = SET_RELOAD_CHROMATIX;
13849 tune_cmd.module = MODULE_ALL;
13850 tune_cmd.value = TRUE;
13851 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13852 CAM_INTF_PARM_SET_VFE_COMMAND,
13853 tune_cmd);
13854
13855 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13856 CAM_INTF_PARM_SET_PP_COMMAND,
13857 tune_cmd);
13858 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013859
13860 return ret;
13861}
13862
13863/*===========================================================================
13864* FUNCTION : getLogLevel
13865*
13866* DESCRIPTION: Reads the log level property into a variable
13867*
13868* PARAMETERS :
13869* None
13870*
13871* RETURN :
13872* None
13873*==========================================================================*/
13874void QCamera3HardwareInterface::getLogLevel()
13875{
13876 char prop[PROPERTY_VALUE_MAX];
13877 uint32_t globalLogLevel = 0;
13878
13879 property_get("persist.camera.hal.debug", prop, "0");
13880 int val = atoi(prop);
13881 if (0 <= val) {
13882 gCamHal3LogLevel = (uint32_t)val;
13883 }
13884
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013885 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013886 gKpiDebugLevel = atoi(prop);
13887
13888 property_get("persist.camera.global.debug", prop, "0");
13889 val = atoi(prop);
13890 if (0 <= val) {
13891 globalLogLevel = (uint32_t)val;
13892 }
13893
13894 /* Highest log level among hal.logs and global.logs is selected */
13895 if (gCamHal3LogLevel < globalLogLevel)
13896 gCamHal3LogLevel = globalLogLevel;
13897
13898 return;
13899}
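// Debug usage example (assumes a build where these properties are writable):
//   adb shell setprop persist.camera.hal.debug 3
//   adb shell dumpsys media.camera   # dump() re-reads the properties via getLogLevel()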
13900
13901/*===========================================================================
13902 * FUNCTION : validateStreamRotations
13903 *
13904 * DESCRIPTION: Check if the rotations requested are supported
13905 *
13906 * PARAMETERS :
13907 * @stream_list : streams to be configured
13908 *
13909 * RETURN : NO_ERROR on success
13910 * -EINVAL on failure
13911 *
13912 *==========================================================================*/
13913int QCamera3HardwareInterface::validateStreamRotations(
13914 camera3_stream_configuration_t *streamList)
13915{
13916 int rc = NO_ERROR;
13917
13918 /*
13919 * Loop through all streams requested in configuration
13920 * Check if unsupported rotations have been requested on any of them
13921 */
13922 for (size_t j = 0; j < streamList->num_streams; j++){
13923 camera3_stream_t *newStream = streamList->streams[j];
13924
Emilian Peev35ceeed2017-06-29 11:58:56 -070013925 switch(newStream->rotation) {
13926 case CAMERA3_STREAM_ROTATION_0:
13927 case CAMERA3_STREAM_ROTATION_90:
13928 case CAMERA3_STREAM_ROTATION_180:
13929 case CAMERA3_STREAM_ROTATION_270:
13930 //Expected values
13931 break;
13932 default:
13933 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13934 "type:%d and stream format:%d", __func__,
13935 newStream->rotation, newStream->stream_type,
13936 newStream->format);
13937 return -EINVAL;
13938 }
13939
Thierry Strudel3d639192016-09-09 11:52:26 -070013940 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13941 bool isImplDef = (newStream->format ==
13942 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13943 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13944 isImplDef);
13945
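 // Non-zero rotation is only supported on implementation-defined, non-ZSL streams.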
13946 if (isRotated && (!isImplDef || isZsl)) {
13947 LOGE("Error: Unsupported rotation of %d requested for stream"
13948 "type:%d and stream format:%d",
13949 newStream->rotation, newStream->stream_type,
13950 newStream->format);
13951 rc = -EINVAL;
13952 break;
13953 }
13954 }
13955
13956 return rc;
13957}
13958
13959/*===========================================================================
13960* FUNCTION : getFlashInfo
13961*
13962* DESCRIPTION: Retrieve information about whether the device has a flash.
13963*
13964* PARAMETERS :
13965* @cameraId : Camera id to query
13966* @hasFlash : Boolean indicating whether there is a flash device
13967* associated with given camera
13968* @flashNode : If a flash device exists, this will be its device node.
13969*
13970* RETURN :
13971* None
13972*==========================================================================*/
13973void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13974 bool& hasFlash,
13975 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13976{
13977 cam_capability_t* camCapability = gCamCapability[cameraId];
13978 if (NULL == camCapability) {
13979 hasFlash = false;
13980 flashNode[0] = '\0';
13981 } else {
13982 hasFlash = camCapability->flash_available;
13983 strlcpy(flashNode,
13984 (char*)camCapability->flash_dev_name,
13985 QCAMERA_MAX_FILEPATH_LENGTH);
13986 }
13987}
13988
13989/*===========================================================================
13990* FUNCTION : getEepromVersionInfo
13991*
13992* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13993*
13994* PARAMETERS : None
13995*
13996* RETURN : string describing EEPROM version
13997* "\0" if no such info available
13998*==========================================================================*/
13999const char *QCamera3HardwareInterface::getEepromVersionInfo()
14000{
14001 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14002}
14003
14004/*===========================================================================
14005* FUNCTION : getLdafCalib
14006*
14007* DESCRIPTION: Retrieve Laser AF calibration data
14008*
14009* PARAMETERS : None
14010*
14011* RETURN : Two uint32_t describing laser AF calibration data
14012* NULL if none is available.
14013*==========================================================================*/
14014const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14015{
14016 if (mLdafCalibExist) {
14017 return &mLdafCalib[0];
14018 } else {
14019 return NULL;
14020 }
14021}
14022
14023/*===========================================================================
14024 * FUNCTION : dynamicUpdateMetaStreamInfo
14025 *
14026 * DESCRIPTION: This function:
14027 * (1) stops all the channels
14028 * (2) returns error on pending requests and buffers
14029 * (3) sends metastream_info in setparams
14030 * (4) starts all channels
14031 * This is useful when sensor has to be restarted to apply any
14032 * settings such as frame rate from a different sensor mode
14033 *
14034 * PARAMETERS : None
14035 *
14036 * RETURN : NO_ERROR on success
14037 * Error codes on failure
14038 *
14039 *==========================================================================*/
14040int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14041{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014042 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014043 int rc = NO_ERROR;
14044
14045 LOGD("E");
14046
14047 rc = stopAllChannels();
14048 if (rc < 0) {
14049 LOGE("stopAllChannels failed");
14050 return rc;
14051 }
14052
14053 rc = notifyErrorForPendingRequests();
14054 if (rc < 0) {
14055 LOGE("notifyErrorForPendingRequests failed");
14056 return rc;
14057 }
14058
14059 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14060 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14061 "Format:%d",
14062 mStreamConfigInfo.type[i],
14063 mStreamConfigInfo.stream_sizes[i].width,
14064 mStreamConfigInfo.stream_sizes[i].height,
14065 mStreamConfigInfo.postprocess_mask[i],
14066 mStreamConfigInfo.format[i]);
14067 }
14068
14069 /* Send meta stream info once again so that ISP can start */
14070 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14071 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14072 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14073 mParameters);
14074 if (rc < 0) {
14075 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14076 }
14077
14078 rc = startAllChannels();
14079 if (rc < 0) {
14080 LOGE("startAllChannels failed");
14081 return rc;
14082 }
14083
14084 LOGD("X");
14085 return rc;
14086}
14087
14088/*===========================================================================
14089 * FUNCTION : stopAllChannels
14090 *
14091 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14092 *
14093 * PARAMETERS : None
14094 *
14095 * RETURN : NO_ERROR on success
14096 * Error codes on failure
14097 *
14098 *==========================================================================*/
14099int32_t QCamera3HardwareInterface::stopAllChannels()
14100{
14101 int32_t rc = NO_ERROR;
14102
14103 LOGD("Stopping all channels");
14104 // Stop the Streams/Channels
14105 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14106 it != mStreamInfo.end(); it++) {
14107 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14108 if (channel) {
14109 channel->stop();
14110 }
14111 (*it)->status = INVALID;
14112 }
14113
14114 if (mSupportChannel) {
14115 mSupportChannel->stop();
14116 }
14117 if (mAnalysisChannel) {
14118 mAnalysisChannel->stop();
14119 }
14120 if (mRawDumpChannel) {
14121 mRawDumpChannel->stop();
14122 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014123 if (mHdrPlusRawSrcChannel) {
14124 mHdrPlusRawSrcChannel->stop();
14125 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014126 if (mMetadataChannel) {
14127 /* If content of mStreamInfo is not 0, there is metadata stream */
14128 mMetadataChannel->stop();
14129 }
14130
14131 LOGD("All channels stopped");
14132 return rc;
14133}
14134
14135/*===========================================================================
14136 * FUNCTION : startAllChannels
14137 *
14138 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14139 *
14140 * PARAMETERS : None
14141 *
14142 * RETURN : NO_ERROR on success
14143 * Error codes on failure
14144 *
14145 *==========================================================================*/
14146int32_t QCamera3HardwareInterface::startAllChannels()
14147{
14148 int32_t rc = NO_ERROR;
14149
14150 LOGD("Start all channels ");
14151 // Start the Streams/Channels
14152 if (mMetadataChannel) {
14153 /* If content of mStreamInfo is not 0, there is metadata stream */
14154 rc = mMetadataChannel->start();
14155 if (rc < 0) {
14156 LOGE("META channel start failed");
14157 return rc;
14158 }
14159 }
14160 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14161 it != mStreamInfo.end(); it++) {
14162 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14163 if (channel) {
14164 rc = channel->start();
14165 if (rc < 0) {
14166 LOGE("channel start failed");
14167 return rc;
14168 }
14169 }
14170 }
14171 if (mAnalysisChannel) {
14172 mAnalysisChannel->start();
14173 }
14174 if (mSupportChannel) {
14175 rc = mSupportChannel->start();
14176 if (rc < 0) {
14177 LOGE("Support channel start failed");
14178 return rc;
14179 }
14180 }
14181 if (mRawDumpChannel) {
14182 rc = mRawDumpChannel->start();
14183 if (rc < 0) {
14184 LOGE("RAW dump channel start failed");
14185 return rc;
14186 }
14187 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014188 if (mHdrPlusRawSrcChannel) {
14189 rc = mHdrPlusRawSrcChannel->start();
14190 if (rc < 0) {
14191 LOGE("HDR+ RAW channel start failed");
14192 return rc;
14193 }
14194 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014195
14196 LOGD("All channels started");
14197 return rc;
14198}
14199
14200/*===========================================================================
14201 * FUNCTION : notifyErrorForPendingRequests
14202 *
14203 * DESCRIPTION: This function sends error for all the pending requests/buffers
14204 *
14205 * PARAMETERS : None
14206 *
14207 * RETURN : Error codes
14208 * NO_ERROR on success
14209 *
14210 *==========================================================================*/
14211int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14212{
Emilian Peev7650c122017-01-19 08:24:33 -080014213 notifyErrorFoPendingDepthData(mDepthChannel);
14214
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014215 auto pendingRequest = mPendingRequestsList.begin();
14216 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014217
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014218 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14219 // buffers (for which buffers aren't sent yet).
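 // The two lists are walked in frame-number order like a merge: a frame present only
 // in the buffer list gets ERROR_BUFFER, a frame present only in the request list gets
 // ERROR_RESULT, and a frame present in both gets ERROR_REQUEST.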
14220 while (pendingRequest != mPendingRequestsList.end() ||
14221 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14222 if (pendingRequest == mPendingRequestsList.end() ||
14223 pendingBuffer->frame_number < pendingRequest->frame_number) {
14224 // If metadata for this frame was already sent, notify about a buffer error and return buffers
14225 // with error.
14226 for (auto &info : pendingBuffer->mPendingBufferList) {
14227 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014228 camera3_notify_msg_t notify_msg;
14229 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14230 notify_msg.type = CAMERA3_MSG_ERROR;
14231 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014232 notify_msg.message.error.error_stream = info.stream;
14233 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014234 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014235
14236 camera3_stream_buffer_t buffer = {};
14237 buffer.acquire_fence = -1;
14238 buffer.release_fence = -1;
14239 buffer.buffer = info.buffer;
14240 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14241 buffer.stream = info.stream;
14242 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014243 }
14244
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014245 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14246 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14247 pendingBuffer->frame_number > pendingRequest->frame_number) {
14248 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014249 camera3_notify_msg_t notify_msg;
14250 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14251 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014252 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14253 notify_msg.message.error.error_stream = nullptr;
14254 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014255 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014256
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014257 if (pendingRequest->input_buffer != nullptr) {
14258 camera3_capture_result result = {};
14259 result.frame_number = pendingRequest->frame_number;
14260 result.result = nullptr;
14261 result.input_buffer = pendingRequest->input_buffer;
14262 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014263 }
14264
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014265 mShutterDispatcher.clear(pendingRequest->frame_number);
14266 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14267 } else {
14268            // If neither the buffers nor the result metadata have been sent yet, notify about
14269            // a request error and return the buffers with an error status.
14270 for (auto &info : pendingBuffer->mPendingBufferList) {
14271 camera3_notify_msg_t notify_msg;
14272 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14273 notify_msg.type = CAMERA3_MSG_ERROR;
14274 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14275 notify_msg.message.error.error_stream = info.stream;
14276 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14277 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014278
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014279 camera3_stream_buffer_t buffer = {};
14280 buffer.acquire_fence = -1;
14281 buffer.release_fence = -1;
14282 buffer.buffer = info.buffer;
14283 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14284 buffer.stream = info.stream;
14285 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14286 }
14287
14288 if (pendingRequest->input_buffer != nullptr) {
14289 camera3_capture_result result = {};
14290 result.frame_number = pendingRequest->frame_number;
14291 result.result = nullptr;
14292 result.input_buffer = pendingRequest->input_buffer;
14293 orchestrateResult(&result);
14294 }
14295
14296 mShutterDispatcher.clear(pendingRequest->frame_number);
14297 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14298 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014299 }
14300 }
14301
14302    /* Reset the pending frame drop list and the pending requests list */
14303 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014304 mShutterDispatcher.clear();
14305 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014306 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014307 mExpectedFrameDuration = 0;
14308 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014309 LOGH("Cleared all the pending buffers ");
14310
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014311 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014312}
14313
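/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Check whether a stream of the given dimensions has to go
 *              through the encoder path, i.e. it exceeds either the maximum
 *              viewfinder size or the 4K video dimensions.
 *
 * PARAMETERS : @max_viewfinder_size: maximum viewfinder dimensions
 *              @width: stream width
 *              @height: stream height
 *
 * RETURN     : True if the stream needs the encoder path, False otherwise
 *==========================================================================*/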
14314bool QCamera3HardwareInterface::isOnEncoder(
14315 const cam_dimension_t max_viewfinder_size,
14316 uint32_t width, uint32_t height)
14317{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014318 return ((width > (uint32_t)max_viewfinder_size.width) ||
14319 (height > (uint32_t)max_viewfinder_size.height) ||
14320 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14321 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014322}
14323
14324/*===========================================================================
14325 * FUNCTION : setBundleInfo
14326 *
14327 * DESCRIPTION: Set bundle info for all streams that are bundled.
14328 *
14329 * PARAMETERS : None
14330 *
14331 * RETURN : NO_ERROR on success
14332 * Error codes on failure
14333 *==========================================================================*/
14334int32_t QCamera3HardwareInterface::setBundleInfo()
14335{
14336 int32_t rc = NO_ERROR;
14337
14338 if (mChannelHandle) {
14339 cam_bundle_config_t bundleInfo;
14340 memset(&bundleInfo, 0, sizeof(bundleInfo));
14341 rc = mCameraHandle->ops->get_bundle_info(
14342 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14343 if (rc != NO_ERROR) {
14344 LOGE("get_bundle_info failed");
14345 return rc;
14346 }
14347 if (mAnalysisChannel) {
14348 mAnalysisChannel->setBundleInfo(bundleInfo);
14349 }
14350 if (mSupportChannel) {
14351 mSupportChannel->setBundleInfo(bundleInfo);
14352 }
14353 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14354 it != mStreamInfo.end(); it++) {
14355 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14356 channel->setBundleInfo(bundleInfo);
14357 }
14358 if (mRawDumpChannel) {
14359 mRawDumpChannel->setBundleInfo(bundleInfo);
14360 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014361 if (mHdrPlusRawSrcChannel) {
14362 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14363 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014364 }
14365
14366 return rc;
14367}
14368
14369/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014370 * FUNCTION : setInstantAEC
14371 *
14372 * DESCRIPTION: Set Instant AEC related params.
14373 *
14374 * PARAMETERS :
14375 * @meta: CameraMetadata reference
14376 *
14377 * RETURN : NO_ERROR on success
14378 * Error codes on failure
14379 *==========================================================================*/
14380int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14381{
14382 int32_t rc = NO_ERROR;
14383 uint8_t val = 0;
14384 char prop[PROPERTY_VALUE_MAX];
14385
14386 // First try to configure instant AEC from framework metadata
14387 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14388 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14389 }
14390
14391 // If framework did not set this value, try to read from set prop.
14392 if (val == 0) {
14393 memset(prop, 0, sizeof(prop));
14394 property_get("persist.camera.instant.aec", prop, "0");
14395 val = (uint8_t)atoi(prop);
14396 }
14397
14398 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14399 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14400 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14401 mInstantAEC = val;
14402 mInstantAECSettledFrameNumber = 0;
14403 mInstantAecFrameIdxCount = 0;
14404 LOGH("instantAEC value set %d",val);
14405 if (mInstantAEC) {
14406 memset(prop, 0, sizeof(prop));
14407 property_get("persist.camera.ae.instant.bound", prop, "10");
14408 int32_t aec_frame_skip_cnt = atoi(prop);
14409 if (aec_frame_skip_cnt >= 0) {
14410 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14411 } else {
14412 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14413 rc = BAD_VALUE;
14414 }
14415 }
14416 } else {
14417 LOGE("Bad instant aec value set %d", val);
14418 rc = BAD_VALUE;
14419 }
14420 return rc;
14421}
14422
14423/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014424 * FUNCTION : get_num_overall_buffers
14425 *
14426 * DESCRIPTION: Get the total number of pending buffers across all requests.
14427 *
14428 * PARAMETERS : None
14429 *
14430 * RETURN : Number of overall pending buffers
14431 *
14432 *==========================================================================*/
14433uint32_t PendingBuffersMap::get_num_overall_buffers()
14434{
14435 uint32_t sum_buffers = 0;
14436 for (auto &req : mPendingBuffersInRequest) {
14437 sum_buffers += req.mPendingBufferList.size();
14438 }
14439 return sum_buffers;
14440}
14441
14442/*===========================================================================
14443 * FUNCTION : removeBuf
14444 *
14445 * DESCRIPTION: Remove a matching buffer from the tracker.
14446 *
14447 * PARAMETERS : @buffer: image buffer for the callback
14448 *
14449 * RETURN : None
14450 *
14451 *==========================================================================*/
14452void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14453{
14454 bool buffer_found = false;
14455 for (auto req = mPendingBuffersInRequest.begin();
14456 req != mPendingBuffersInRequest.end(); req++) {
14457 for (auto k = req->mPendingBufferList.begin();
14458 k != req->mPendingBufferList.end(); k++ ) {
14459 if (k->buffer == buffer) {
14460 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14461 req->frame_number, buffer);
14462 k = req->mPendingBufferList.erase(k);
14463 if (req->mPendingBufferList.empty()) {
14464 // Remove this request from Map
14465 req = mPendingBuffersInRequest.erase(req);
14466 }
14467 buffer_found = true;
14468 break;
14469 }
14470 }
14471 if (buffer_found) {
14472 break;
14473 }
14474 }
14475 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14476 get_num_overall_buffers());
14477}
14478
14479/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014480 * FUNCTION : getBufErrStatus
14481 *
14482 * DESCRIPTION: Get the buffer error status.
14483 *
14484 * PARAMETERS : @buffer: buffer handle
14485 *
14486 * RETURN : Error status
14487 *
14488 *==========================================================================*/
14489int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14490{
14491 for (auto& req : mPendingBuffersInRequest) {
14492 for (auto& k : req.mPendingBufferList) {
14493 if (k.buffer == buffer)
14494 return k.bufStatus;
14495 }
14496 }
14497 return CAMERA3_BUFFER_STATUS_OK;
14498}
14499
14500/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014501 * FUNCTION : setPAAFSupport
14502 *
14503 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14504 * feature mask according to stream type and filter
14505 * arrangement
14506 *
14507 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14508 * @stream_type: stream type
14509 * @filter_arrangement: filter arrangement
14510 *
14511 * RETURN : None
14512 *==========================================================================*/
14513void QCamera3HardwareInterface::setPAAFSupport(
14514 cam_feature_mask_t& feature_mask,
14515 cam_stream_type_t stream_type,
14516 cam_color_filter_arrangement_t filter_arrangement)
14517{
Thierry Strudel3d639192016-09-09 11:52:26 -070014518 switch (filter_arrangement) {
14519 case CAM_FILTER_ARRANGEMENT_RGGB:
14520 case CAM_FILTER_ARRANGEMENT_GRBG:
14521 case CAM_FILTER_ARRANGEMENT_GBRG:
14522 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014523 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14524 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014525 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014526 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14527 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014528 }
14529 break;
14530 case CAM_FILTER_ARRANGEMENT_Y:
14531 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14532 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14533 }
14534 break;
14535 default:
14536 break;
14537 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014538 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14539 feature_mask, stream_type, filter_arrangement);
14540
14541
Thierry Strudel3d639192016-09-09 11:52:26 -070014542}
14543
14544/*===========================================================================
14545* FUNCTION : getSensorMountAngle
14546*
14547* DESCRIPTION: Retrieve sensor mount angle
14548*
14549* PARAMETERS : None
14550*
14551* RETURN : sensor mount angle in uint32_t
14552*==========================================================================*/
14553uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14554{
14555 return gCamCapability[mCameraId]->sensor_mount_angle;
14556}
14557
14558/*===========================================================================
14559* FUNCTION : getRelatedCalibrationData
14560*
14561* DESCRIPTION: Retrieve related system calibration data
14562*
14563* PARAMETERS : None
14564*
14565* RETURN : Pointer of related system calibration data
14566*==========================================================================*/
14567const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14568{
14569 return (const cam_related_system_calibration_data_t *)
14570 &(gCamCapability[mCameraId]->related_cam_calibration);
14571}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014572
14573/*===========================================================================
14574 * FUNCTION : is60HzZone
14575 *
14576 * DESCRIPTION: Check whether the device is in a region with 60Hz mains electricity frequency
14577 *
14578 * PARAMETERS : None
14579 *
14580 * RETURN : True if in 60Hz zone, False otherwise
14581 *==========================================================================*/
14582bool QCamera3HardwareInterface::is60HzZone()
14583{
14584 time_t t = time(NULL);
14585 struct tm lt;
14586
14587 struct tm* r = localtime_r(&t, &lt);
14588
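    // Heuristic: UTC offsets roughly between -2h and +8h (Europe, Africa, most of Asia) are
    // treated as 50Hz regions; any other offset, or a failed localtime_r(), defaults to 60Hz.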
14589 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14590 return true;
14591 else
14592 return false;
14593}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014594
14595/*===========================================================================
14596 * FUNCTION : adjustBlackLevelForCFA
14597 *
14598 * DESCRIPTION: Adjust the black level pattern given in RGGB order to the order of
14599 *              the Bayer CFA (Color Filter Array).
14600 *
14601 * PARAMETERS : @input: black level pattern in the order of RGGB
14602 * @output: black level pattern in the order of CFA
14603 * @color_arrangement: CFA color arrangement
14604 *
14605 * RETURN : None
14606 *==========================================================================*/
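// Example: with a GRBG CFA, an RGGB-ordered input {R, Gr, Gb, B} is remapped to {Gr, R, B, Gb}.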
14607template<typename T>
14608void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14609 T input[BLACK_LEVEL_PATTERN_CNT],
14610 T output[BLACK_LEVEL_PATTERN_CNT],
14611 cam_color_filter_arrangement_t color_arrangement)
14612{
14613 switch (color_arrangement) {
14614 case CAM_FILTER_ARRANGEMENT_GRBG:
14615 output[0] = input[1];
14616 output[1] = input[0];
14617 output[2] = input[3];
14618 output[3] = input[2];
14619 break;
14620 case CAM_FILTER_ARRANGEMENT_GBRG:
14621 output[0] = input[2];
14622 output[1] = input[3];
14623 output[2] = input[0];
14624 output[3] = input[1];
14625 break;
14626 case CAM_FILTER_ARRANGEMENT_BGGR:
14627 output[0] = input[3];
14628 output[1] = input[2];
14629 output[2] = input[1];
14630 output[3] = input[0];
14631 break;
14632 case CAM_FILTER_ARRANGEMENT_RGGB:
14633 output[0] = input[0];
14634 output[1] = input[1];
14635 output[2] = input[2];
14636 output[3] = input[3];
14637 break;
14638 default:
14639 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14640 break;
14641 }
14642}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014643
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014644void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14645 CameraMetadata &resultMetadata,
14646 std::shared_ptr<metadata_buffer_t> settings)
14647{
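    // Propagate the JPEG-related settings and capture intent of the original HDR+ still capture
    // request into the result metadata, since the result metadata comes from a ZSL buffer and
    // does not carry them.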
14648 if (settings == nullptr) {
14649 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14650 return;
14651 }
14652
14653 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14654 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14655 }
14656
14657 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14658 String8 str((const char *)gps_methods);
14659 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14660 }
14661
14662 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14663 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14664 }
14665
14666 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14667 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14668 }
14669
14670 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14671 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14672 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14673 }
14674
14675 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14676 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14677 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14678 }
14679
14680 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14681 int32_t fwk_thumb_size[2];
14682 fwk_thumb_size[0] = thumb_size->width;
14683 fwk_thumb_size[1] = thumb_size->height;
14684 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14685 }
14686
14687 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14688 uint8_t fwk_intent = intent[0];
14689 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14690 }
14691}
14692
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014693bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14694 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
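    // A request is HDR+ compatible only if it asks for high-quality noise reduction, edge
    // enhancement, aberration correction and tonemapping, uses auto AE/AWB with no effects,
    // enables ZSL, keeps flash off, uses the full active-array crop region, and has a single
    // JPEG output buffer. Each check below rejects the request otherwise.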
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014695 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14696 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14697 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014698 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014699 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014700 return false;
14701 }
14702
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014703 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014704 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14705 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014706 return false;
14707 }
14708
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014709 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14710 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14711 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14712 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14713 return false;
14714 }
14715
14716 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14717 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14718 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14719 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14720 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14721 return false;
14722 }
14723
14724 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14725 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14726 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14727 return false;
14728 }
14729
14730 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14731 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14732 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14733        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14734 return false;
14735 }
14736
14737 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14738 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14739 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14740 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14741 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14742 return false;
14743 }
14744
14745 // TODO (b/32585046): support non-ZSL.
14746 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14747 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14748 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14749 return false;
14750 }
14751
14752 // TODO (b/32586081): support flash.
14753 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14754 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14755 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14756 return false;
14757 }
14758
14759 // TODO (b/36492953): support digital zoom.
14760 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14761 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14762 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14763 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14764 gCamCapability[mCameraId]->active_array_size.width ||
14765 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14766 gCamCapability[mCameraId]->active_array_size.height) {
14767 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14768 __FUNCTION__);
14769 return false;
14770 }
14771
14772 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14773 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14774 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14775 return false;
14776 }
14777
14778 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014779 if (request.num_output_buffers != 1 ||
14780 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014781 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014782 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014783 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014784            ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14785                request.output_buffers[i].stream->width,
14786                request.output_buffers[i].stream->height,
14787                request.output_buffers[i].stream->format);
14787 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014788 return false;
14789 }
14790
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014791 return true;
14792}
14793
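// Try to submit a capture request to the HDR+ service: verify the request is HDR+ compatible,
// borrow a YUV buffer from the pic channel for the final output, and forward the request to
// gHdrPlusClient. Returns false if any of these steps fails.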
14794bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14795 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14796 const CameraMetadata &metadata)
14797{
14798 if (hdrPlusRequest == nullptr) return false;
14799 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14800
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014801 // Get a YUV buffer from pic channel.
14802 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14803 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14804 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14805 if (res != OK) {
14806 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14807 __FUNCTION__, strerror(-res), res);
14808 return false;
14809 }
14810
14811 pbcamera::StreamBuffer buffer;
14812 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014813 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014814 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014815 buffer.dataSize = yuvBuffer->frame_len;
14816
14817 pbcamera::CaptureRequest pbRequest;
14818 pbRequest.id = request.frame_number;
14819 pbRequest.outputBuffers.push_back(buffer);
14820
14821 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014822 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014823 if (res != OK) {
14824 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14825 strerror(-res), res);
14826 return false;
14827 }
14828
14829 hdrPlusRequest->yuvBuffer = yuvBuffer;
14830 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14831
14832 return true;
14833}
14834
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014835status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14836{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014837 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14838 return OK;
14839 }
14840
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014841 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014842 if (res != OK) {
14843 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14844 strerror(-res), res);
14845 return res;
14846 }
14847 gHdrPlusClientOpening = true;
14848
14849 return OK;
14850}
14851
Chien-Yu Chenee335912017-02-09 17:53:20 -080014852status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14853{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014854 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014855
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014856 if (mHdrPlusModeEnabled) {
14857 return OK;
14858 }
14859
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014860 // Check if gHdrPlusClient is opened or being opened.
14861 if (gHdrPlusClient == nullptr) {
14862 if (gHdrPlusClientOpening) {
14863 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14864 return OK;
14865 }
14866
14867 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014868 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014869 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14870 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014871 return res;
14872 }
14873
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014874 // When opening HDR+ client completes, HDR+ mode will be enabled.
14875 return OK;
14876
Chien-Yu Chenee335912017-02-09 17:53:20 -080014877 }
14878
14879 // Configure stream for HDR+.
14880 res = configureHdrPlusStreamsLocked();
14881 if (res != OK) {
14882 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014883 return res;
14884 }
14885
14886 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14887 res = gHdrPlusClient->setZslHdrPlusMode(true);
14888 if (res != OK) {
14889 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014890 return res;
14891 }
14892
14893 mHdrPlusModeEnabled = true;
14894 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14895
14896 return OK;
14897}
14898
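// Block until any in-flight asynchronous HDR+ client open has completed. The caller must hold
// gHdrPlusClientLock through |lock|.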
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014899void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14900{
14901 if (gHdrPlusClientOpening) {
14902 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14903 }
14904}
14905
Chien-Yu Chenee335912017-02-09 17:53:20 -080014906void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14907{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014908 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014909 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014910 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14911 if (res != OK) {
14912 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14913 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014914
14915 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014916 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014917 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014918 }
14919
14920 mHdrPlusModeEnabled = false;
14921 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14922}
14923
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014924bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14925{
14926 // Check if mPictureChannel is valid.
14927 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14928 if (mPictureChannel == nullptr) {
14929 return false;
14930 }
14931
14932 return true;
14933}
14934
Chien-Yu Chenee335912017-02-09 17:53:20 -080014935status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014936{
14937 pbcamera::InputConfiguration inputConfig;
14938 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14939 status_t res = OK;
14940
14941 // Configure HDR+ client streams.
14942 // Get input config.
14943 if (mHdrPlusRawSrcChannel) {
14944 // HDR+ input buffers will be provided by HAL.
14945 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14946 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14947 if (res != OK) {
14948            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14949 __FUNCTION__, strerror(-res), res);
14950 return res;
14951 }
14952
14953 inputConfig.isSensorInput = false;
14954 } else {
14955 // Sensor MIPI will send data to Easel.
14956 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014957 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014958 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14959 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14960 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14961 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14962 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014963 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014964 if (mSensorModeInfo.num_raw_bits != 10) {
14965 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14966 mSensorModeInfo.num_raw_bits);
14967 return BAD_VALUE;
14968 }
14969
14970 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014971 }
14972
14973 // Get output configurations.
14974 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014975 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014976
14977 // Easel may need to output YUV output buffers if mPictureChannel was created.
14978 pbcamera::StreamConfiguration yuvOutputConfig;
14979 if (mPictureChannel != nullptr) {
14980 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14981 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14982 if (res != OK) {
14983            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14984 __FUNCTION__, strerror(-res), res);
14985
14986 return res;
14987 }
14988
14989 outputStreamConfigs.push_back(yuvOutputConfig);
14990 }
14991
14992 // TODO: consider other channels for YUV output buffers.
14993
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014994 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014995 if (res != OK) {
14996        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14997 strerror(-res), res);
14998 return res;
14999 }
15000
15001 return OK;
15002}
15003
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015004void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15005{
15006 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15007 // Set HAL state to error.
15008 pthread_mutex_lock(&mMutex);
15009 mState = ERROR;
15010 pthread_mutex_unlock(&mMutex);
15011
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015012 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015013}
15014
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015015void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15016{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015017 if (client == nullptr) {
15018 ALOGE("%s: Opened client is null.", __FUNCTION__);
15019 return;
15020 }
15021
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015022 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015023 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15024
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015025 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015026 if (!gHdrPlusClientOpening) {
15027 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15028 return;
15029 }
15030
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015031 gHdrPlusClient = std::move(client);
15032 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015033 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015034
15035 // Set static metadata.
15036 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15037 if (res != OK) {
15038 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15039 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015040 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015041 gHdrPlusClient = nullptr;
15042 return;
15043 }
15044
15045 // Enable HDR+ mode.
15046 res = enableHdrPlusModeLocked();
15047 if (res != OK) {
15048        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
15049 }
15050}
15051
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015052void QCamera3HardwareInterface::onOpenFailed(status_t err)
15053{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015054 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015055 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015056 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015057 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015058}
15059
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015060void QCamera3HardwareInterface::onFatalError()
15061{
15062 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15063
15064 // Set HAL state to error.
15065 pthread_mutex_lock(&mMutex);
15066 mState = ERROR;
15067 pthread_mutex_unlock(&mMutex);
15068
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015069 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015070}
15071
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015072void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15073{
15074 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15075 __LINE__, requestId, apSensorTimestampNs);
15076
15077 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15078}
15079
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015080void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15081{
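    // Tell the framework, via a partial result carrying
    // NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, that the next still-capture
    // request for HDR+ can now be submitted.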
15082 pthread_mutex_lock(&mMutex);
15083
15084 // Find the pending request for this result metadata.
15085 auto requestIter = mPendingRequestsList.begin();
15086 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15087 requestIter++;
15088 }
15089
15090 if (requestIter == mPendingRequestsList.end()) {
15091 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15092 pthread_mutex_unlock(&mMutex);
15093 return;
15094 }
15095
15096 requestIter->partial_result_cnt++;
15097
15098 CameraMetadata metadata;
15099 uint8_t ready = true;
15100 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15101
15102 // Send it to framework.
15103 camera3_capture_result_t result = {};
15104
15105 result.result = metadata.getAndLock();
15106 // Populate metadata result
15107 result.frame_number = requestId;
15108 result.num_output_buffers = 0;
15109 result.output_buffers = NULL;
15110 result.partial_result = requestIter->partial_result_cnt;
15111
15112 orchestrateResult(&result);
15113 metadata.unlock(result.result);
15114
15115 pthread_mutex_unlock(&mMutex);
15116}
15117
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015118void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15119 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15120 uint32_t stride, int32_t format)
15121{
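    // Deliver the HDR+ postview image to the framework as a partial result carrying the postview
    // configuration and data vendor tags; optionally dump it to a .ppm file when the
    // persist.camera.hdrplus.dump_postview property is set.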
15122 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15123 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15124 __LINE__, width, height, requestId);
15125 char buf[FILENAME_MAX] = {};
15126 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15127 requestId, width, height);
15128
15129 pbcamera::StreamConfiguration config = {};
15130 config.image.width = width;
15131 config.image.height = height;
15132 config.image.format = format;
15133
15134 pbcamera::PlaneConfiguration plane = {};
15135 plane.stride = stride;
15136 plane.scanline = height;
15137
15138 config.image.planes.push_back(plane);
15139
15140 pbcamera::StreamBuffer buffer = {};
15141 buffer.streamId = 0;
15142 buffer.dmaBufFd = -1;
15143 buffer.data = postview->data();
15144 buffer.dataSize = postview->size();
15145
15146 hdrplus_client_utils::writePpm(buf, config, buffer);
15147 }
15148
15149 pthread_mutex_lock(&mMutex);
15150
15151 // Find the pending request for this result metadata.
15152 auto requestIter = mPendingRequestsList.begin();
15153 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15154 requestIter++;
15155 }
15156
15157 if (requestIter == mPendingRequestsList.end()) {
15158 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15159 pthread_mutex_unlock(&mMutex);
15160 return;
15161 }
15162
15163 requestIter->partial_result_cnt++;
15164
15165 CameraMetadata metadata;
15166 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15167 static_cast<int32_t>(stride)};
15168 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15169 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15170
15171 // Send it to framework.
15172 camera3_capture_result_t result = {};
15173
15174 result.result = metadata.getAndLock();
15175 // Populate metadata result
15176 result.frame_number = requestId;
15177 result.num_output_buffers = 0;
15178 result.output_buffers = NULL;
15179 result.partial_result = requestIter->partial_result_cnt;
15180
15181 orchestrateResult(&result);
15182 metadata.unlock(result.result);
15183
15184 pthread_mutex_unlock(&mMutex);
15185}
15186
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015187void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015188 const camera_metadata_t &resultMetadata)
15189{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015190 if (result != nullptr) {
15191 if (result->outputBuffers.size() != 1) {
15192 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15193 result->outputBuffers.size());
15194 return;
15195 }
15196
15197 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15198 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15199 result->outputBuffers[0].streamId);
15200 return;
15201 }
15202
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015203 // TODO (b/34854987): initiate this from HDR+ service.
15204 onNextCaptureReady(result->requestId);
15205
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015206 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015207 HdrPlusPendingRequest pendingRequest;
15208 {
15209 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15210 auto req = mHdrPlusPendingRequests.find(result->requestId);
15211 pendingRequest = req->second;
15212 }
15213
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015214 // Update the result metadata with the settings of the HDR+ still capture request because
15215 // the result metadata belongs to a ZSL buffer.
15216 CameraMetadata metadata;
15217 metadata = &resultMetadata;
15218 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15219 camera_metadata_t* updatedResultMetadata = metadata.release();
15220
15221 QCamera3PicChannel *picChannel =
15222 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15223
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015224 // Check if dumping HDR+ YUV output is enabled.
15225 char prop[PROPERTY_VALUE_MAX];
15226 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15227 bool dumpYuvOutput = atoi(prop);
15228
15229 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015230 // Dump yuv buffer to a ppm file.
15231 pbcamera::StreamConfiguration outputConfig;
15232 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15233 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15234 if (rc == OK) {
15235 char buf[FILENAME_MAX] = {};
15236 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15237 result->requestId, result->outputBuffers[0].streamId,
15238 outputConfig.image.width, outputConfig.image.height);
15239
15240 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15241 } else {
15242 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15243 __FUNCTION__, strerror(-rc), rc);
15244 }
15245 }
15246
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015247 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15248 auto halMetadata = std::make_shared<metadata_buffer_t>();
15249 clear_metadata_buffer(halMetadata.get());
15250
15251 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15252 // encoding.
15253 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15254 halStreamId, /*minFrameDuration*/0);
15255 if (res == OK) {
15256 // Return the buffer to pic channel for encoding.
15257 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15258 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15259 halMetadata);
15260 } else {
15261 // Return the buffer without encoding.
15262 // TODO: This should not happen but we may want to report an error buffer to camera
15263 // service.
15264 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15265 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15266 strerror(-res), res);
15267 }
15268
15269 // Send HDR+ metadata to framework.
15270 {
15271 pthread_mutex_lock(&mMutex);
15272
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015273 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15274 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015275 pthread_mutex_unlock(&mMutex);
15276 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015277
15278 // Remove the HDR+ pending request.
15279 {
15280 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15281 auto req = mHdrPlusPendingRequests.find(result->requestId);
15282 mHdrPlusPendingRequests.erase(req);
15283 }
15284 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015285}
15286
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015287void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15288{
15289 if (failedResult == nullptr) {
15290 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15291 return;
15292 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015293
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015294 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015295
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015296 // Remove the pending HDR+ request.
15297 {
15298 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15299 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15300
15301 // Return the buffer to pic channel.
15302 QCamera3PicChannel *picChannel =
15303 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15304 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15305
15306 mHdrPlusPendingRequests.erase(pendingRequest);
15307 }
15308
15309 pthread_mutex_lock(&mMutex);
15310
15311 // Find the pending buffers.
15312 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15313 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15314 if (pendingBuffers->frame_number == failedResult->requestId) {
15315 break;
15316 }
15317 pendingBuffers++;
15318 }
15319
15320 // Send out buffer errors for the pending buffers.
15321 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15322 std::vector<camera3_stream_buffer_t> streamBuffers;
15323 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15324 // Prepare a stream buffer.
15325 camera3_stream_buffer_t streamBuffer = {};
15326 streamBuffer.stream = buffer.stream;
15327 streamBuffer.buffer = buffer.buffer;
15328 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15329 streamBuffer.acquire_fence = -1;
15330 streamBuffer.release_fence = -1;
15331
15332 streamBuffers.push_back(streamBuffer);
15333
15334 // Send out error buffer event.
15335 camera3_notify_msg_t notify_msg = {};
15336 notify_msg.type = CAMERA3_MSG_ERROR;
15337 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15338 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15339 notify_msg.message.error.error_stream = buffer.stream;
15340
15341 orchestrateNotify(&notify_msg);
15342 }
15343
15344 camera3_capture_result_t result = {};
15345 result.frame_number = pendingBuffers->frame_number;
15346 result.num_output_buffers = streamBuffers.size();
15347 result.output_buffers = &streamBuffers[0];
15348
15349 // Send out result with buffer errors.
15350 orchestrateResult(&result);
15351
15352 // Remove pending buffers.
15353 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15354 }
15355
15356 // Remove pending request.
15357 auto halRequest = mPendingRequestsList.begin();
15358 while (halRequest != mPendingRequestsList.end()) {
15359 if (halRequest->frame_number == failedResult->requestId) {
15360 mPendingRequestsList.erase(halRequest);
15361 break;
15362 }
15363 halRequest++;
15364 }
15365
15366 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015367}
15368
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015369
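// ShutterDispatcher delivers shutter notifications to the framework in frame-number order: a
// shutter is registered with expectShutter() when the request is queued, marked ready with
// markShutterReady() when its sensor timestamp arrives, and only dispatched once every earlier
// expected shutter has been sent. A minimal sketch of the expected call order (not the actual
// call sites in this file):
//
//     mShutterDispatcher.expectShutter(frameNumber, /*isReprocess*/false);
//     ...
//     mShutterDispatcher.markShutterReady(frameNumber, sensorTimestampNs);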
15370ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15371 mParent(parent) {}
15372
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015373void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015374{
15375 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015376
15377 if (isReprocess) {
15378 mReprocessShutters.emplace(frameNumber, Shutter());
15379 } else {
15380 mShutters.emplace(frameNumber, Shutter());
15381 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015382}
15383
15384void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15385{
15386 std::lock_guard<std::mutex> lock(mLock);
15387
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015388 std::map<uint32_t, Shutter> *shutters = nullptr;
15389
15390 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015391 auto shutter = mShutters.find(frameNumber);
15392 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015393 shutter = mReprocessShutters.find(frameNumber);
15394 if (shutter == mReprocessShutters.end()) {
15395 // Shutter was already sent.
15396 return;
15397 }
15398 shutters = &mReprocessShutters;
15399 } else {
15400 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015401 }
15402
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015403 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015404 shutter->second.ready = true;
15405 shutter->second.timestamp = timestamp;
15406
15407    // Iterate through the shutters and send them out until reaching one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015408 shutter = shutters->begin();
15409 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015410 if (!shutter->second.ready) {
15411 // If this shutter is not ready, the following shutters can't be sent.
15412 break;
15413 }
15414
15415 camera3_notify_msg_t msg = {};
15416 msg.type = CAMERA3_MSG_SHUTTER;
15417 msg.message.shutter.frame_number = shutter->first;
15418 msg.message.shutter.timestamp = shutter->second.timestamp;
15419 mParent->orchestrateNotify(&msg);
15420
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015421 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015422 }
15423}
15424
15425void ShutterDispatcher::clear(uint32_t frameNumber)
15426{
15427 std::lock_guard<std::mutex> lock(mLock);
15428 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015429 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015430}
15431
15432void ShutterDispatcher::clear()
15433{
15434 std::lock_guard<std::mutex> lock(mLock);
15435
15436 // Log errors for stale shutters.
15437 for (auto &shutter : mShutters) {
15438 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15439 __FUNCTION__, shutter.first, shutter.second.ready,
15440 shutter.second.timestamp);
15441 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015442
15443 // Log errors for stale reprocess shutters.
15444 for (auto &shutter : mReprocessShutters) {
15445 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15446 __FUNCTION__, shutter.first, shutter.second.ready,
15447 shutter.second.timestamp);
15448 }
15449
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015450 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015451 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015452}
15453
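// OutputBufferDispatcher returns output buffers to the framework in frame-number order, per
// stream: configureStreams() creates a per-stream map, expectBuffer() registers a pending buffer
// for a frame, and markBufferReady() dispatches ready buffers until it reaches one that is still
// pending. A minimal sketch of the expected call order (not the actual call sites in this file):
//
//     mOutputBufferDispatcher.expectBuffer(frameNumber, stream);
//     ...
//     mOutputBufferDispatcher.markBufferReady(frameNumber, streamBuffer);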
15454OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15455 mParent(parent) {}
15456
15457status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15458{
15459 std::lock_guard<std::mutex> lock(mLock);
15460 mStreamBuffers.clear();
15461 if (!streamList) {
15462 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15463 return -EINVAL;
15464 }
15465
15466 // Create a "frame-number -> buffer" map for each stream.
15467 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15468 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15469 }
15470
15471 return OK;
15472}
15473
15474status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15475{
15476 std::lock_guard<std::mutex> lock(mLock);
15477
15478 // Find the "frame-number -> buffer" map for the stream.
15479 auto buffers = mStreamBuffers.find(stream);
15480 if (buffers == mStreamBuffers.end()) {
15481 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15482 return -EINVAL;
15483 }
15484
15485 // Create an unready buffer for this frame number.
15486 buffers->second.emplace(frameNumber, Buffer());
15487 return OK;
15488}
15489
15490void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15491 const camera3_stream_buffer_t &buffer)
15492{
15493 std::lock_guard<std::mutex> lock(mLock);
15494
15495 // Find the frame number -> buffer map for the stream.
15496 auto buffers = mStreamBuffers.find(buffer.stream);
15497 if (buffers == mStreamBuffers.end()) {
15498 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15499 return;
15500 }
15501
15502    // Find the unready buffer for this frame number and mark it ready.
15503 auto pendingBuffer = buffers->second.find(frameNumber);
15504 if (pendingBuffer == buffers->second.end()) {
15505 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15506 return;
15507 }
15508
15509 pendingBuffer->second.ready = true;
15510 pendingBuffer->second.buffer = buffer;
15511
15512    // Iterate through the buffers and send them out until reaching one that is not ready yet.
15513 pendingBuffer = buffers->second.begin();
15514 while (pendingBuffer != buffers->second.end()) {
15515 if (!pendingBuffer->second.ready) {
15516 // If this buffer is not ready, the following buffers can't be sent.
15517 break;
15518 }
15519
15520 camera3_capture_result_t result = {};
15521 result.frame_number = pendingBuffer->first;
15522 result.num_output_buffers = 1;
15523 result.output_buffers = &pendingBuffer->second.buffer;
15524
15525        // Send out the result containing this output buffer.
15526 mParent->orchestrateResult(&result);
15527
15528 pendingBuffer = buffers->second.erase(pendingBuffer);
15529 }
15530}
15531
15532void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15533{
15534 std::lock_guard<std::mutex> lock(mLock);
15535
15536 // Log errors for stale buffers.
15537 for (auto &buffers : mStreamBuffers) {
15538 for (auto &buffer : buffers.second) {
15539 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15540 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15541 }
15542 buffers.second.clear();
15543 }
15544
15545 if (clearConfiguredStreams) {
15546 mStreamBuffers.clear();
15547 }
15548}
15549
Thierry Strudel3d639192016-09-09 11:52:26 -070015550}; //end namespace qcamera