/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A and the final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if the requested FPS is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
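// Illustrative note (not from the original source): METADATA_MAP_SIZE is the usual
// sizeof(array)/sizeof(array[0]) element-count idiom and is meant for the QCameraMap
// translation tables defined further below, e.g. a call along the lines of
//     lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP), fwkEffectMode);
// where lookupHalName stands in for whatever translation helper this HAL actually provides.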

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
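// Illustrative note (not from the original source): code later in this file guards the Easel
// state above with the standard C++11 pattern, e.g.
//     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
// before touching gEaselManagerClient/gHdrPlusClient, and (presumably) waits on
// gHdrPlusClientOpenCond under that same lock while gHdrPlusClientOpening is true.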


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to multiple Android values
 * the traversal logic will select the first one found. (See the illustrative lookup sketch
 * after this table.)
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
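// Illustrative sketch (not part of the original HAL code) of the first-match traversal the
// comment above describes; the helper name and the fwk_name/hal_name field names are
// assumptions taken from how QCameraMap tables are typically consumed in this HAL:
//
//     static cam_illuminat_t fwkToHalIlluminant(
//             camera_metadata_enum_android_sensor_reference_illuminant1_t fwk) {
//         for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//             if (REFERENCE_ILLUMINANT_MAP[i].fwk_name == fwk) {
//                 return REFERENCE_ILLUMINANT_MAP[i].hal_name;
//             }
//         }
//         return CAM_AWB_D50; // placeholder fallback; a real helper would report "not found"
//     }
//
// Going the other way (HAL -> Android), the same low-to-high walk means a HAL value that
// appears several times (e.g. CAM_AWB_D50) always maps back to its lowest-index Android enum.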

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
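// Illustrative note (not from the original source): callers in this file use logEaselEvent()
// to timestamp Easel lifecycle points, e.g.
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// in openCamera() below; the log line is emitted only when gEaselProfilingEnabled is set.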

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}
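// Illustrative note (not from the original source): the open sequence above is, in short,
//     resume Easel (under gHdrPlusClientLock, if present) -> openCamera() -> on failure,
//     suspend Easel again and clear *hw_device;
// mState only advances to OPENED when openCamera() returns NO_ERROR.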

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
1191
1192/*===========================================================================
1193 * FUNCTION : validateStreamDimensions
1194 *
1195 * DESCRIPTION: Check if the configuration requested are those advertised
1196 *
1197 * PARAMETERS :
1198 * @stream_list : streams to be configured
1199 *
1200 * RETURN :
1201 *
1202 *==========================================================================*/
1203int QCamera3HardwareInterface::validateStreamDimensions(
1204 camera3_stream_configuration_t *streamList)
1205{
1206 int rc = NO_ERROR;
1207 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001208 uint32_t depthWidth = 0;
1209 uint32_t depthHeight = 0;
1210 if (mPDSupported) {
1211 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1212 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001214
1215 camera3_stream_t *inputStream = NULL;
1216 /*
1217 * Loop through all streams to find input stream if it exists*
1218 */
1219 for (size_t i = 0; i< streamList->num_streams; i++) {
1220 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1221 if (inputStream != NULL) {
1222 LOGE("Error, Multiple input streams requested");
1223 return -EINVAL;
1224 }
1225 inputStream = streamList->streams[i];
1226 }
1227 }
1228 /*
1229 * Loop through all streams requested in configuration
1230 * Check if unsupported sizes have been requested on any of them
1231 */
1232 for (size_t j = 0; j < streamList->num_streams; j++) {
1233 bool sizeFound = false;
1234 camera3_stream_t *newStream = streamList->streams[j];
1235
1236 uint32_t rotatedHeight = newStream->height;
1237 uint32_t rotatedWidth = newStream->width;
1238 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1239 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1240 rotatedHeight = newStream->width;
1241 rotatedWidth = newStream->height;
1242 }
1243
1244 /*
1245 * Sizes are different for each type of stream format check against
1246 * appropriate table.
1247 */
1248 switch (newStream->format) {
1249 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1250 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1251 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001252 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1253 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1254 mPDSupported) {
1255 if ((depthWidth == newStream->width) &&
1256 (depthHeight == newStream->height)) {
1257 sizeFound = true;
1258 }
1259 break;
1260 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001261 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1262 for (size_t i = 0; i < count; i++) {
1263 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1264 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1265 sizeFound = true;
1266 break;
1267 }
1268 }
1269 break;
1270 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001271 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1272 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001273 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001274 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001275 if ((depthSamplesCount == newStream->width) &&
1276 (1 == newStream->height)) {
1277 sizeFound = true;
1278 }
1279 break;
1280 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001281 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1282 /* Verify set size against generated sizes table */
1283 for (size_t i = 0; i < count; i++) {
1284 if (((int32_t)rotatedWidth ==
1285 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1286 ((int32_t)rotatedHeight ==
1287 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1288 sizeFound = true;
1289 break;
1290 }
1291 }
1292 break;
1293 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1294 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1295 default:
1296 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1297 || newStream->stream_type == CAMERA3_STREAM_INPUT
1298 || IS_USAGE_ZSL(newStream->usage)) {
1299 if (((int32_t)rotatedWidth ==
1300 gCamCapability[mCameraId]->active_array_size.width) &&
1301 ((int32_t)rotatedHeight ==
1302 gCamCapability[mCameraId]->active_array_size.height)) {
1303 sizeFound = true;
1304 break;
1305 }
1306 /* We could potentially break here to enforce that the ZSL stream
1307 * set from the framework is always full active array size, but it
1308 * is not clear from the spec whether the framework will always
1309 * follow that. We also have logic to override to full array size,
1310 * so keep the logic lenient for now.
1311 */
1312 }
1313 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1314 MAX_SIZES_CNT);
1315 for (size_t i = 0; i < count; i++) {
1316 if (((int32_t)rotatedWidth ==
1317 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1318 ((int32_t)rotatedHeight ==
1319 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1320 sizeFound = true;
1321 break;
1322 }
1323 }
1324 break;
1325 } /* End of switch(newStream->format) */
1326
1327 /* We error out even if a single stream has unsupported size set */
1328 if (!sizeFound) {
1329 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1330 rotatedWidth, rotatedHeight, newStream->format,
1331 gCamCapability[mCameraId]->active_array_size.width,
1332 gCamCapability[mCameraId]->active_array_size.height);
1333 rc = -EINVAL;
1334 break;
1335 }
1336 } /* End of for each stream */
1337 return rc;
1338}
1339
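/* Usage sketch (illustrative only, not part of the HAL flow): a minimal
 * caller of validateStreamDimensions() on a QCamera3HardwareInterface
 * instance, assuming a stream configuration filled in by the framework.
 * The 1920x1080 size is hypothetical and only passes if it is present in
 * the corresponding sizes table.
 *
 *   camera3_stream_t stream = {};
 *   stream.stream_type = CAMERA3_STREAM_OUTPUT;
 *   stream.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
 *   stream.width = 1920;
 *   stream.height = 1080;
 *   camera3_stream_t *streams[] = { &stream };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams = 1;
 *   config.streams = streams;
 *   int rc = validateStreamDimensions(&config);  // NO_ERROR on success
 */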
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001340/*===========================================================================
1341 * FUNCTION : validateUsageFlags
1342 *
1343 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1344 *
1345 * PARAMETERS :
1346 * @stream_list : streams to be configured
1347 *
1348 * RETURN :
1349 * NO_ERROR if the usage flags are supported
1350 * error code if usage flags are not supported
1351 *
1352 *==========================================================================*/
1353int QCamera3HardwareInterface::validateUsageFlags(
1354 const camera3_stream_configuration_t* streamList)
1355{
1356 for (size_t j = 0; j < streamList->num_streams; j++) {
1357 const camera3_stream_t *newStream = streamList->streams[j];
1358
1359 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1360 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1361 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1362 continue;
1363 }
1364
Jason Leec4cf5032017-05-24 18:31:41 -07001365 // Here we only care whether it's EIS3 or not
1366 char is_type_value[PROPERTY_VALUE_MAX];
1367 property_get("persist.camera.is_type", is_type_value, "4");
1368 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1369 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1370 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1371 isType = IS_TYPE_NONE;
1372
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001373 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1374 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1375 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1376 bool forcePreviewUBWC = true;
1377 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1378 forcePreviewUBWC = false;
1379 }
1380 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001381 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001382 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001383 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001384 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001385 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001386
1387 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1388 // So color spaces will always match.
1389
1390 // Check whether underlying formats of shared streams match.
1391 if (isVideo && isPreview && videoFormat != previewFormat) {
1392 LOGE("Combined video and preview usage flag is not supported");
1393 return -EINVAL;
1394 }
1395 if (isPreview && isZSL && previewFormat != zslFormat) {
1396 LOGE("Combined preview and zsl usage flag is not supported");
1397 return -EINVAL;
1398 }
1399 if (isVideo && isZSL && videoFormat != zslFormat) {
1400 LOGE("Combined video and zsl usage flag is not supported");
1401 return -EINVAL;
1402 }
1403 }
1404 return NO_ERROR;
1405}
1406
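/* Illustrative note: validateUsageFlags() only rejects combinations where
 * shared IMPLEMENTATION_DEFINED streams would resolve to different internal
 * formats. For example (hypothetical), a single output stream whose usage
 * carries both the preview and video-encoder bits passes as long as
 * getStreamDefaultFormat() returns the same cam_format_t for
 * CAM_STREAM_TYPE_PREVIEW and CAM_STREAM_TYPE_VIDEO at that resolution. */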
1407/*===========================================================================
1408 * FUNCTION : validateUsageFlagsForEis
1409 *
1410 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1411 *
1412 * PARAMETERS :
1413 * @stream_list : streams to be configured
1414 *
1415 * RETURN :
1416 * NO_ERROR if the usage flags are supported
1417 * error code if usage flags are not supported
1418 *
1419 *==========================================================================*/
1420int QCamera3HardwareInterface::validateUsageFlagsForEis(
1421 const camera3_stream_configuration_t* streamList)
1422{
1423 for (size_t j = 0; j < streamList->num_streams; j++) {
1424 const camera3_stream_t *newStream = streamList->streams[j];
1425
1426 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1427 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1428
1429 // Because EIS is "hard-coded" for certain use cases, and the current
1430 // implementation doesn't support sharing preview and video on the same
1431 // stream, return failure if EIS is forced on.
1432 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1433 LOGE("Combined video and preview usage flag is not supported due to EIS");
1434 return -EINVAL;
1435 }
1436 }
1437 return NO_ERROR;
1438}
1439
Thierry Strudel3d639192016-09-09 11:52:26 -07001440/*==============================================================================
1441 * FUNCTION : isSupportChannelNeeded
1442 *
1443 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1444 *
1445 * PARAMETERS :
1446 * @stream_list : streams to be configured
1447 * @stream_config_info : the config info for streams to be configured
1448 *
1449 * RETURN : Boolean true/false decision
1450 *
1451 *==========================================================================*/
1452bool QCamera3HardwareInterface::isSupportChannelNeeded(
1453 camera3_stream_configuration_t *streamList,
1454 cam_stream_size_info_t stream_config_info)
1455{
1456 uint32_t i;
1457 bool pprocRequested = false;
1458 /* Check for conditions where the PProc pipeline does not have any streams */
1459 for (i = 0; i < stream_config_info.num_streams; i++) {
1460 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1461 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1462 pprocRequested = true;
1463 break;
1464 }
1465 }
1466
1467 if (pprocRequested == false )
1468 return true;
1469
1470 /* Dummy stream needed if only raw or jpeg streams present */
1471 for (i = 0; i < streamList->num_streams; i++) {
1472 switch(streamList->streams[i]->format) {
1473 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1474 case HAL_PIXEL_FORMAT_RAW10:
1475 case HAL_PIXEL_FORMAT_RAW16:
1476 case HAL_PIXEL_FORMAT_BLOB:
1477 break;
1478 default:
1479 return false;
1480 }
1481 }
1482 return true;
1483}
1484
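/* Illustrative example: a configuration whose output formats are all RAW
 * or BLOB falls through the second loop above and returns true (a dummy
 * support channel is needed), while any YUV_420_888 or
 * IMPLEMENTATION_DEFINED output returns false, provided at least one
 * stream requested post-processing. */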
1485/*==============================================================================
1486 * FUNCTION : getSensorModeInfo
1487 *
1488 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 *
1490 * PARAMETERS :
1491 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * RETURN : int32_t type of status
1494 * NO_ERROR -- success
1495 * non-zero failure code
1496 *
1497 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001498int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001499{
1500 int32_t rc = NO_ERROR;
1501
1502 cam_dimension_t max_dim = {0, 0};
1503 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1504 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1505 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1506 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1507 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1508 }
1509
1510 clear_metadata_buffer(mParameters);
1511
1512 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1513 max_dim);
1514 if (rc != NO_ERROR) {
1515 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1516 return rc;
1517 }
1518
1519 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1520 if (rc != NO_ERROR) {
1521 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1522 return rc;
1523 }
1524
1525 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001527
1528 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1529 mParameters);
1530 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001531 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001532 return rc;
1533 }
1534
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001535 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001536 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1537 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1538 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1539 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1540 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001541
1542 return rc;
1543}
1544
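/* Usage sketch (illustrative only): getSensorModeInfo() is intended to be
 * called after mStreamConfigInfo has been populated, since it derives the
 * max dimension from the configured streams before querying the backend.
 *
 *   cam_sensor_mode_info_t sensorModeInfo = {};
 *   if (getSensorModeInfo(sensorModeInfo) == NO_ERROR) {
 *       // e.g. use sensorModeInfo.op_pixel_clk for readout-time estimates
 *   }
 */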
1545/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001546 * FUNCTION : getCurrentSensorModeInfo
1547 *
1548 * DESCRIPTION: Get sensor mode information that is currently selected.
1549 *
1550 * PARAMETERS :
1551 * @sensorModeInfo : sensor mode information (output)
1552 *
1553 * RETURN : int32_t type of status
1554 * NO_ERROR -- success
1555 * non-zero failure code
1556 *
1557 *==========================================================================*/
1558int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1559{
1560 int32_t rc = NO_ERROR;
1561
1562 clear_metadata_buffer(mParameters);
1563 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1564
1565 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1566 mParameters);
1567 if (rc != NO_ERROR) {
1568 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1569 return rc;
1570 }
1571
1572 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1573 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1574 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1575 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1576 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1577 sensorModeInfo.num_raw_bits);
1578
1579 return rc;
1580}
1581
1582/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001583 * FUNCTION : addToPPFeatureMask
1584 *
1585 * DESCRIPTION: add additional features to pp feature mask based on
1586 * stream type and usecase
1587 *
1588 * PARAMETERS :
1589 * @stream_format : stream type for feature mask
1590 * @stream_idx : stream idx within postprocess_mask list to change
1591 *
1592 * RETURN : None
1593 *
1594 *==========================================================================*/
1595void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1596 uint32_t stream_idx)
1597{
1598 char feature_mask_value[PROPERTY_VALUE_MAX];
1599 cam_feature_mask_t feature_mask;
1600 int args_converted;
1601 int property_len;
1602
1603 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001604#ifdef _LE_CAMERA_
1605 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1606 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1607 property_len = property_get("persist.camera.hal3.feature",
1608 feature_mask_value, swtnr_feature_mask_value);
1609#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001612#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1614 (feature_mask_value[1] == 'x')) {
1615 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1616 } else {
1617 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1618 }
1619 if (1 != args_converted) {
1620 feature_mask = 0;
1621 LOGE("Wrong feature mask %s", feature_mask_value);
1622 return;
1623 }
1624
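/* Illustrative only: the property accepts either hex ("0x...") or decimal
 * input, e.g.
 *   adb shell setprop persist.camera.hal3.feature 0x100
 * where the value (hypothetical here) is a cam_feature_mask_t bitmask
 * such as CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD. */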
1625 switch (stream_format) {
1626 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1627 /* Add LLVD to pp feature mask only if video hint is enabled */
1628 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1629 mStreamConfigInfo.postprocess_mask[stream_idx]
1630 |= CAM_QTI_FEATURE_SW_TNR;
1631 LOGH("Added SW TNR to pp feature mask");
1632 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1633 mStreamConfigInfo.postprocess_mask[stream_idx]
1634 |= CAM_QCOM_FEATURE_LLVD;
1635 LOGH("Added LLVD SeeMore to pp feature mask");
1636 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001637 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1638 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1639 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1640 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001641 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1642 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1643 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1644 CAM_QTI_FEATURE_BINNING_CORRECTION;
1645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001646 break;
1647 }
1648 default:
1649 break;
1650 }
1651 LOGD("PP feature mask %llx",
1652 mStreamConfigInfo.postprocess_mask[stream_idx]);
1653}
1654
1655/*==============================================================================
1656 * FUNCTION : updateFpsInPreviewBuffer
1657 *
1658 * DESCRIPTION: update FPS information in preview buffer.
1659 *
1660 * PARAMETERS :
1661 * @metadata : pointer to metadata buffer
1662 * @frame_number: frame_number to look for in pending buffer list
1663 *
1664 * RETURN : None
1665 *
1666 *==========================================================================*/
1667void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1668 uint32_t frame_number)
1669{
1670 // Mark all pending buffers for this particular request
1671 // with corresponding framerate information
1672 for (List<PendingBuffersInRequest>::iterator req =
1673 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1674 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1675 for(List<PendingBufferInfo>::iterator j =
1676 req->mPendingBufferList.begin();
1677 j != req->mPendingBufferList.end(); j++) {
1678 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1679 if ((req->frame_number == frame_number) &&
1680 (channel->getStreamTypeMask() &
1681 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1682 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1683 CAM_INTF_PARM_FPS_RANGE, metadata) {
1684 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1685 struct private_handle_t *priv_handle =
1686 (struct private_handle_t *)(*(j->buffer));
1687 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1688 }
1689 }
1690 }
1691 }
1692}
1693
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001694/*==============================================================================
1695 * FUNCTION : updateTimeStampInPendingBuffers
1696 *
1697 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1698 * of a frame number
1699 *
1700 * PARAMETERS :
1701 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1702 * @timestamp : timestamp to be set
1703 *
1704 * RETURN : None
1705 *
1706 *==========================================================================*/
1707void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1708 uint32_t frameNumber, nsecs_t timestamp)
1709{
1710 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1711 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1712 if (req->frame_number != frameNumber)
1713 continue;
1714
1715 for (auto k = req->mPendingBufferList.begin();
1716 k != req->mPendingBufferList.end(); k++ ) {
1717 struct private_handle_t *priv_handle =
1718 (struct private_handle_t *) (*(k->buffer));
1719 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1720 }
1721 }
1722 return;
1723}
1724
Thierry Strudel3d639192016-09-09 11:52:26 -07001725/*===========================================================================
1726 * FUNCTION : configureStreams
1727 *
1728 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1729 * and output streams.
1730 *
1731 * PARAMETERS :
1732 * @stream_list : streams to be configured
1733 *
1734 * RETURN :
1735 *
1736 *==========================================================================*/
1737int QCamera3HardwareInterface::configureStreams(
1738 camera3_stream_configuration_t *streamList)
1739{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001740 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001741 int rc = 0;
1742
1743 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001744 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001745 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001746 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001747
1748 return rc;
1749}
1750
1751/*===========================================================================
1752 * FUNCTION : configureStreamsPerfLocked
1753 *
1754 * DESCRIPTION: configureStreams while perfLock is held.
1755 *
1756 * PARAMETERS :
1757 * @stream_list : streams to be configured
1758 *
1759 * RETURN : int32_t type of status
1760 * NO_ERROR -- success
1761 * non-zero failure code
1762 *==========================================================================*/
1763int QCamera3HardwareInterface::configureStreamsPerfLocked(
1764 camera3_stream_configuration_t *streamList)
1765{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001766 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001767 int rc = 0;
1768
1769 // Sanity check stream_list
1770 if (streamList == NULL) {
1771 LOGE("NULL stream configuration");
1772 return BAD_VALUE;
1773 }
1774 if (streamList->streams == NULL) {
1775 LOGE("NULL stream list");
1776 return BAD_VALUE;
1777 }
1778
1779 if (streamList->num_streams < 1) {
1780 LOGE("Bad number of streams requested: %d",
1781 streamList->num_streams);
1782 return BAD_VALUE;
1783 }
1784
1785 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1786 LOGE("Maximum number of streams %d exceeded: %d",
1787 MAX_NUM_STREAMS, streamList->num_streams);
1788 return BAD_VALUE;
1789 }
1790
Jason Leec4cf5032017-05-24 18:31:41 -07001791 mOpMode = streamList->operation_mode;
1792 LOGD("mOpMode: %d", mOpMode);
1793
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001794 rc = validateUsageFlags(streamList);
1795 if (rc != NO_ERROR) {
1796 return rc;
1797 }
1798
1799 /* First invalidate all the streams in mStreamInfo;
1800 * if they appear again, they will be validated */
1801 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1802 it != mStreamInfo.end(); it++) {
1803 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1804 if (channel) {
1805 channel->stop();
1806 }
1807 (*it)->status = INVALID;
1808 }
1809
1810 if (mRawDumpChannel) {
1811 mRawDumpChannel->stop();
1812 delete mRawDumpChannel;
1813 mRawDumpChannel = NULL;
1814 }
1815
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001816 if (mHdrPlusRawSrcChannel) {
1817 mHdrPlusRawSrcChannel->stop();
1818 delete mHdrPlusRawSrcChannel;
1819 mHdrPlusRawSrcChannel = NULL;
1820 }
1821
Thierry Strudel3d639192016-09-09 11:52:26 -07001822 if (mSupportChannel)
1823 mSupportChannel->stop();
1824
1825 if (mAnalysisChannel) {
1826 mAnalysisChannel->stop();
1827 }
1828 if (mMetadataChannel) {
1829 /* If mStreamInfo is not empty, there is a metadata stream */
1830 mMetadataChannel->stop();
1831 }
1832 if (mChannelHandle) {
1833 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001834 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 LOGD("stopping channel %d", mChannelHandle);
1836 }
1837
1838 pthread_mutex_lock(&mMutex);
1839
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001840 mPictureChannel = NULL;
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 // Check state
1843 switch (mState) {
1844 case INITIALIZED:
1845 case CONFIGURED:
1846 case STARTED:
1847 /* valid state */
1848 break;
1849 default:
1850 LOGE("Invalid state %d", mState);
1851 pthread_mutex_unlock(&mMutex);
1852 return -ENODEV;
1853 }
1854
1855 /* Check whether we have video stream */
1856 m_bIs4KVideo = false;
1857 m_bIsVideo = false;
1858 m_bEisSupportedSize = false;
1859 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001860 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001862 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001863 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 uint32_t videoWidth = 0U;
1865 uint32_t videoHeight = 0U;
1866 size_t rawStreamCnt = 0;
1867 size_t stallStreamCnt = 0;
1868 size_t processedStreamCnt = 0;
1869 // Number of streams on ISP encoder path
1870 size_t numStreamsOnEncoder = 0;
1871 size_t numYuv888OnEncoder = 0;
1872 bool bYuv888OverrideJpeg = false;
1873 cam_dimension_t largeYuv888Size = {0, 0};
1874 cam_dimension_t maxViewfinderSize = {0, 0};
1875 bool bJpegExceeds4K = false;
1876 bool bJpegOnEncoder = false;
1877 bool bUseCommonFeatureMask = false;
1878 cam_feature_mask_t commonFeatureMask = 0;
1879 bool bSmallJpegSize = false;
1880 uint32_t width_ratio;
1881 uint32_t height_ratio;
1882 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1883 camera3_stream_t *inputStream = NULL;
1884 bool isJpeg = false;
1885 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001886 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001887 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001888
1889 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1890
1891 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001892 uint8_t eis_prop_set;
1893 uint32_t maxEisWidth = 0;
1894 uint32_t maxEisHeight = 0;
1895
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001896 // Initialize all instant AEC related variables
1897 mInstantAEC = false;
1898 mResetInstantAEC = false;
1899 mInstantAECSettledFrameNumber = 0;
1900 mAecSkipDisplayFrameBound = 0;
1901 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001902 mCurrFeatureState = 0;
1903 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001904
Thierry Strudel3d639192016-09-09 11:52:26 -07001905 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1906
1907 size_t count = IS_TYPE_MAX;
1908 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1909 for (size_t i = 0; i < count; i++) {
1910 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001911 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1912 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 break;
1914 }
1915 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001916
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001917 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 maxEisWidth = MAX_EIS_WIDTH;
1919 maxEisHeight = MAX_EIS_HEIGHT;
1920 }
1921
1922 /* EIS setprop control */
1923 char eis_prop[PROPERTY_VALUE_MAX];
1924 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001925 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 eis_prop_set = (uint8_t)atoi(eis_prop);
1927
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001928 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1930
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001931 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1932 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001933
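/* Illustrative only: EIS can be disabled for debugging with
 *   adb shell setprop persist.camera.eis.enable 0
 * (the default is "1"); even when the property is set, m_bEisEnable stays
 * false in constrained high speed mode, per the check above. */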
Thierry Strudel3d639192016-09-09 11:52:26 -07001934 /* stream configurations */
1935 for (size_t i = 0; i < streamList->num_streams; i++) {
1936 camera3_stream_t *newStream = streamList->streams[i];
1937 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1938 "height = %d, rotation = %d, usage = 0x%x",
1939 i, newStream->stream_type, newStream->format,
1940 newStream->width, newStream->height, newStream->rotation,
1941 newStream->usage);
1942 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1943 newStream->stream_type == CAMERA3_STREAM_INPUT){
1944 isZsl = true;
1945 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001946 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1947 IS_USAGE_PREVIEW(newStream->usage)) {
1948 isPreview = true;
1949 }
1950
Thierry Strudel3d639192016-09-09 11:52:26 -07001951 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1952 inputStream = newStream;
1953 }
1954
Emilian Peev7650c122017-01-19 08:24:33 -08001955 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1956 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001957 isJpeg = true;
1958 jpegSize.width = newStream->width;
1959 jpegSize.height = newStream->height;
1960 if (newStream->width > VIDEO_4K_WIDTH ||
1961 newStream->height > VIDEO_4K_HEIGHT)
1962 bJpegExceeds4K = true;
1963 }
1964
1965 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1966 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1967 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001968 // In HAL3 we can have multiple different video streams.
1969 // The variables video width and height are used below as
1970 // dimensions of the biggest of them
1971 if (videoWidth < newStream->width ||
1972 videoHeight < newStream->height) {
1973 videoWidth = newStream->width;
1974 videoHeight = newStream->height;
1975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1977 (VIDEO_4K_HEIGHT <= newStream->height)) {
1978 m_bIs4KVideo = true;
1979 }
1980 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1981 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001982
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 }
1984 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1985 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1986 switch (newStream->format) {
1987 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001988 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1989 depthPresent = true;
1990 break;
1991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 stallStreamCnt++;
1993 if (isOnEncoder(maxViewfinderSize, newStream->width,
1994 newStream->height)) {
1995 numStreamsOnEncoder++;
1996 bJpegOnEncoder = true;
1997 }
1998 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1999 newStream->width);
2000 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2001 newStream->height);
2002 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2003 "FATAL: max_downscale_factor cannot be zero and so assert");
2004 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2005 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2006 LOGH("Setting small jpeg size flag to true");
2007 bSmallJpegSize = true;
2008 }
2009 break;
2010 case HAL_PIXEL_FORMAT_RAW10:
2011 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2012 case HAL_PIXEL_FORMAT_RAW16:
2013 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002014 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2015 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2016 pdStatCount++;
2017 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002018 break;
2019 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2020 processedStreamCnt++;
2021 if (isOnEncoder(maxViewfinderSize, newStream->width,
2022 newStream->height)) {
2023 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2024 !IS_USAGE_ZSL(newStream->usage)) {
2025 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2026 }
2027 numStreamsOnEncoder++;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2031 processedStreamCnt++;
2032 if (isOnEncoder(maxViewfinderSize, newStream->width,
2033 newStream->height)) {
2034 // If Yuv888 size is not greater than 4K, set feature mask
2035 // to SUPERSET so that it supports concurrent requests on
2036 // YUV and JPEG.
2037 if (newStream->width <= VIDEO_4K_WIDTH &&
2038 newStream->height <= VIDEO_4K_HEIGHT) {
2039 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2040 }
2041 numStreamsOnEncoder++;
2042 numYuv888OnEncoder++;
2043 largeYuv888Size.width = newStream->width;
2044 largeYuv888Size.height = newStream->height;
2045 }
2046 break;
2047 default:
2048 processedStreamCnt++;
2049 if (isOnEncoder(maxViewfinderSize, newStream->width,
2050 newStream->height)) {
2051 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2052 numStreamsOnEncoder++;
2053 }
2054 break;
2055 }
2056
2057 }
2058 }
2059
2060 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2061 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2062 !m_bIsVideo) {
2063 m_bEisEnable = false;
2064 }
2065
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002066 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2067 pthread_mutex_unlock(&mMutex);
2068 return -EINVAL;
2069 }
2070
Thierry Strudel54dc9782017-02-15 12:12:10 -08002071 uint8_t forceEnableTnr = 0;
2072 char tnr_prop[PROPERTY_VALUE_MAX];
2073 memset(tnr_prop, 0, sizeof(tnr_prop));
2074 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2075 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2076
Thierry Strudel3d639192016-09-09 11:52:26 -07002077 /* Logic to enable/disable TNR based on specific config size/etc.*/
2078 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002079 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2080 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002081 else if (forceEnableTnr)
2082 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002083
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002084 char videoHdrProp[PROPERTY_VALUE_MAX];
2085 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2086 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2087 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2088
2089 if (hdr_mode_prop == 1 && m_bIsVideo &&
2090 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2091 m_bVideoHdrEnabled = true;
2092 else
2093 m_bVideoHdrEnabled = false;
2094
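/* Illustrative only (debug properties; values are examples):
 *   adb shell setprop debug.camera.tnr.forceenable 1   // force TNR on
 *   adb shell setprop persist.camera.hdr.video 1       // request video HDR
 * Video HDR additionally requires a video stream and is ignored in
 * constrained high speed mode, per the checks above. */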
2095
Thierry Strudel3d639192016-09-09 11:52:26 -07002096 /* Check if num_streams is sane */
2097 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2098 rawStreamCnt > MAX_RAW_STREAMS ||
2099 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2100 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2101 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2102 pthread_mutex_unlock(&mMutex);
2103 return -EINVAL;
2104 }
2105 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002106 if (isZsl && m_bIs4KVideo) {
2107 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002108 pthread_mutex_unlock(&mMutex);
2109 return -EINVAL;
2110 }
2111 /* Check if stream sizes are sane */
2112 if (numStreamsOnEncoder > 2) {
2113 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2114 pthread_mutex_unlock(&mMutex);
2115 return -EINVAL;
2116 } else if (1 < numStreamsOnEncoder){
2117 bUseCommonFeatureMask = true;
2118 LOGH("Multiple streams above max viewfinder size, common mask needed");
2119 }
2120
2121 /* Check if BLOB size is greater than 4k in 4k recording case */
2122 if (m_bIs4KVideo && bJpegExceeds4K) {
2123 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2124 pthread_mutex_unlock(&mMutex);
2125 return -EINVAL;
2126 }
2127
Emilian Peev7650c122017-01-19 08:24:33 -08002128 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2129 depthPresent) {
2130 LOGE("HAL doesn't support depth streams in HFR mode!");
2131 pthread_mutex_unlock(&mMutex);
2132 return -EINVAL;
2133 }
2134
Thierry Strudel3d639192016-09-09 11:52:26 -07002135 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2136 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2137 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2138 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2139 // configurations:
2140 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2141 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2142 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2143 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2144 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2145 __func__);
2146 pthread_mutex_unlock(&mMutex);
2147 return -EINVAL;
2148 }
2149
2150 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2151 // the YUV stream's size is greater or equal to the JPEG size, set common
2152 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2153 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2154 jpegSize.width, jpegSize.height) &&
2155 largeYuv888Size.width > jpegSize.width &&
2156 largeYuv888Size.height > jpegSize.height) {
2157 bYuv888OverrideJpeg = true;
2158 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2159 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2160 }
2161
2162 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2163 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2164 commonFeatureMask);
2165 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2166 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2167
2168 rc = validateStreamDimensions(streamList);
2169 if (rc == NO_ERROR) {
2170 rc = validateStreamRotations(streamList);
2171 }
2172 if (rc != NO_ERROR) {
2173 LOGE("Invalid stream configuration requested!");
2174 pthread_mutex_unlock(&mMutex);
2175 return rc;
2176 }
2177
Emilian Peev0f3c3162017-03-15 12:57:46 +00002178 if (1 < pdStatCount) {
2179 LOGE("HAL doesn't support multiple PD streams");
2180 pthread_mutex_unlock(&mMutex);
2181 return -EINVAL;
2182 }
2183
2184 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2185 (1 == pdStatCount)) {
2186 LOGE("HAL doesn't support PD streams in HFR mode!");
2187 pthread_mutex_unlock(&mMutex);
2188 return -EINVAL;
2189 }
2190
Thierry Strudel3d639192016-09-09 11:52:26 -07002191 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2192 for (size_t i = 0; i < streamList->num_streams; i++) {
2193 camera3_stream_t *newStream = streamList->streams[i];
2194 LOGH("newStream type = %d, stream format = %d "
2195 "stream size : %d x %d, stream rotation = %d",
2196 newStream->stream_type, newStream->format,
2197 newStream->width, newStream->height, newStream->rotation);
2198 // If the stream is already in mStreamInfo, validate it
2199 bool stream_exists = false;
2200 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2201 it != mStreamInfo.end(); it++) {
2202 if ((*it)->stream == newStream) {
2203 QCamera3ProcessingChannel *channel =
2204 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2205 stream_exists = true;
2206 if (channel)
2207 delete channel;
2208 (*it)->status = VALID;
2209 (*it)->stream->priv = NULL;
2210 (*it)->channel = NULL;
2211 }
2212 }
2213 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2214 //new stream
2215 stream_info_t* stream_info;
2216 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2217 if (!stream_info) {
2218 LOGE("Could not allocate stream info");
2219 rc = -ENOMEM;
2220 pthread_mutex_unlock(&mMutex);
2221 return rc;
2222 }
2223 stream_info->stream = newStream;
2224 stream_info->status = VALID;
2225 stream_info->channel = NULL;
2226 mStreamInfo.push_back(stream_info);
2227 }
2228 /* Covers Opaque ZSL and API1 F/W ZSL */
2229 if (IS_USAGE_ZSL(newStream->usage)
2230 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2231 if (zslStream != NULL) {
2232 LOGE("Multiple input/reprocess streams requested!");
2233 pthread_mutex_unlock(&mMutex);
2234 return BAD_VALUE;
2235 }
2236 zslStream = newStream;
2237 }
2238 /* Covers YUV reprocess */
2239 if (inputStream != NULL) {
2240 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2241 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2242 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2243 && inputStream->width == newStream->width
2244 && inputStream->height == newStream->height) {
2245 if (zslStream != NULL) {
2246 /* This scenario indicates that multiple YUV streams with the same
2247 * size as the input stream have been requested. Since the zsl stream
2248 * handle is used solely to override the size of streams that share
2249 * h/w streams, we just make a guess here as to which stream is the
2250 * ZSL stream. This will be refactored once we have generic logic for
2251 * streams sharing encoder output.
2252 */
2253 LOGH("Warning, Multiple ip/reprocess streams requested!");
2254 }
2255 zslStream = newStream;
2256 }
2257 }
2258 }
2259
2260 /* If a zsl stream is set, we know that we have configured at least one input or
2261 bidirectional stream */
2262 if (NULL != zslStream) {
2263 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2264 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2265 mInputStreamInfo.format = zslStream->format;
2266 mInputStreamInfo.usage = zslStream->usage;
2267 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2268 mInputStreamInfo.dim.width,
2269 mInputStreamInfo.dim.height,
2270 mInputStreamInfo.format, mInputStreamInfo.usage);
2271 }
2272
2273 cleanAndSortStreamInfo();
2274 if (mMetadataChannel) {
2275 delete mMetadataChannel;
2276 mMetadataChannel = NULL;
2277 }
2278 if (mSupportChannel) {
2279 delete mSupportChannel;
2280 mSupportChannel = NULL;
2281 }
2282
2283 if (mAnalysisChannel) {
2284 delete mAnalysisChannel;
2285 mAnalysisChannel = NULL;
2286 }
2287
2288 if (mDummyBatchChannel) {
2289 delete mDummyBatchChannel;
2290 mDummyBatchChannel = NULL;
2291 }
2292
Emilian Peev7650c122017-01-19 08:24:33 -08002293 if (mDepthChannel) {
2294 mDepthChannel = NULL;
2295 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002296 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002297
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002298 mShutterDispatcher.clear();
2299 mOutputBufferDispatcher.clear();
2300
Thierry Strudel2896d122017-02-23 19:18:03 -08002301 char is_type_value[PROPERTY_VALUE_MAX];
2302 property_get("persist.camera.is_type", is_type_value, "4");
2303 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2304
Binhao Line406f062017-05-03 14:39:44 -07002305 char property_value[PROPERTY_VALUE_MAX];
2306 property_get("persist.camera.gzoom.at", property_value, "0");
2307 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002308 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2309 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2310 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2311 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002312
2313 property_get("persist.camera.gzoom.4k", property_value, "0");
2314 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2315
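/* Illustrative only: persist.camera.gzoom.at is treated as a bitmask --
 * bit 0 enables Google zoom on the video stream and bit 1 on the preview
 * stream (back camera only). For example, a value of 3 enables both, and
 * persist.camera.gzoom.4k must also be set for the zoom to apply to 4K
 * video. */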
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 //Create metadata channel and initialize it
2317 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2318 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2319 gCamCapability[mCameraId]->color_arrangement);
2320 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2321 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002322 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 if (mMetadataChannel == NULL) {
2324 LOGE("failed to allocate metadata channel");
2325 rc = -ENOMEM;
2326 pthread_mutex_unlock(&mMutex);
2327 return rc;
2328 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002329 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002330 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2331 if (rc < 0) {
2332 LOGE("metadata channel initialization failed");
2333 delete mMetadataChannel;
2334 mMetadataChannel = NULL;
2335 pthread_mutex_unlock(&mMutex);
2336 return rc;
2337 }
2338
Thierry Strudel2896d122017-02-23 19:18:03 -08002339 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002341 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002342 // Keep track of preview/video streams indices.
2343 // There could be more than one preview streams, but only one video stream.
2344 int32_t video_stream_idx = -1;
2345 int32_t preview_stream_idx[streamList->num_streams];
2346 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002347 bool previewTnr[streamList->num_streams];
2348 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2349 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2350 // Loop through once to determine preview TNR conditions before creating channels.
2351 for (size_t i = 0; i < streamList->num_streams; i++) {
2352 camera3_stream_t *newStream = streamList->streams[i];
2353 uint32_t stream_usage = newStream->usage;
2354 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2355 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2356 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2357 video_stream_idx = (int32_t)i;
2358 else
2359 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2360 }
2361 }
2362 // By default, preview stream TNR is disabled.
2363 // Enable TNR to the preview stream if all conditions below are satisfied:
2364 // 1. preview resolution == video resolution.
2365 // 2. video stream TNR is enabled.
2366 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2367 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2368 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2369 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2370 if (m_bTnrEnabled && m_bTnrVideo &&
2371 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2372 video_stream->width == preview_stream->width &&
2373 video_stream->height == preview_stream->height) {
2374 previewTnr[preview_stream_idx[i]] = true;
2375 }
2376 }
2377
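/* Illustrative example (hypothetical sizes): with a 1920x1080 video stream
 * that has TNR enabled and a 1920x1080 preview stream, on EIS 2.0 or on the
 * front camera, previewTnr[] is set for that preview stream; any mismatch
 * in resolution leaves preview TNR disabled. */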
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2379 /* Allocate channel objects for the requested streams */
2380 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002381
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 camera3_stream_t *newStream = streamList->streams[i];
2383 uint32_t stream_usage = newStream->usage;
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2386 struct camera_info *p_info = NULL;
2387 pthread_mutex_lock(&gCamLock);
2388 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2389 pthread_mutex_unlock(&gCamLock);
2390 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2391 || IS_USAGE_ZSL(newStream->usage)) &&
2392 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002393 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002394 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002395 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2396 if (bUseCommonFeatureMask)
2397 zsl_ppmask = commonFeatureMask;
2398 else
2399 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 if (numStreamsOnEncoder > 0)
2402 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2403 else
2404 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002405 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 LOGH("Input stream configured, reprocess config");
2410 } else {
2411 //for non zsl streams find out the format
2412 switch (newStream->format) {
2413 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2414 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002415 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002416 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2417 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2418 /* add additional features to pp feature mask */
2419 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2420 mStreamConfigInfo.num_streams);
2421
2422 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_VIDEO;
2425 if (m_bTnrEnabled && m_bTnrVideo) {
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002432 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2434 CAM_QTI_FEATURE_PPEISCORE;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 } else {
2441 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2442 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002443 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2445 CAM_QCOM_FEATURE_CPP_TNR;
2446 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2448 ~CAM_QCOM_FEATURE_CDS;
2449 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002450 if(!m_bSwTnrPreview) {
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2452 ~CAM_QTI_FEATURE_SW_TNR;
2453 }
Binhao Line406f062017-05-03 14:39:44 -07002454 if (is_goog_zoom_preview_enabled) {
2455 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2456 CAM_QCOM_FEATURE_GOOG_ZOOM;
2457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002458 padding_info.width_padding = mSurfaceStridePadding;
2459 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002460 previewSize.width = (int32_t)newStream->width;
2461 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002462 }
2463 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2464 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2466 newStream->height;
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2468 newStream->width;
2469 }
2470 }
2471 break;
2472 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002473 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002474 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2475 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2476 if (bUseCommonFeatureMask)
2477 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2478 commonFeatureMask;
2479 else
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2481 CAM_QCOM_FEATURE_NONE;
2482 } else {
2483 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2484 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2485 }
2486 break;
2487 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002488 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2490 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2491 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2492 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2493 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 /* Remove rotation if it is not supported
2495 for 4K LiveVideo snapshot case (online processing) */
2496 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2497 CAM_QCOM_FEATURE_ROTATION)) {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2499 &= ~CAM_QCOM_FEATURE_ROTATION;
2500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002501 } else {
2502 if (bUseCommonFeatureMask &&
2503 isOnEncoder(maxViewfinderSize, newStream->width,
2504 newStream->height)) {
2505 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2506 } else {
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2508 }
2509 }
2510 if (isZsl) {
2511 if (zslStream) {
2512 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2513 (int32_t)zslStream->width;
2514 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2515 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002516 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2517 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 } else {
2519 LOGE("Error, No ZSL stream identified");
2520 pthread_mutex_unlock(&mMutex);
2521 return -EINVAL;
2522 }
2523 } else if (m_bIs4KVideo) {
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2525 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2526 } else if (bYuv888OverrideJpeg) {
2527 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2528 (int32_t)largeYuv888Size.width;
2529 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2530 (int32_t)largeYuv888Size.height;
2531 }
2532 break;
2533 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2534 case HAL_PIXEL_FORMAT_RAW16:
2535 case HAL_PIXEL_FORMAT_RAW10:
2536 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2538 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002539 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2540 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2541 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2542 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2543 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2544 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2545 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2546 gCamCapability[mCameraId]->dt[mPDIndex];
2547 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2548 gCamCapability[mCameraId]->vc[mPDIndex];
2549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 break;
2551 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2554 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2555 break;
2556 }
2557 }
2558
2559 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2560 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2561 gCamCapability[mCameraId]->color_arrangement);
2562
2563 if (newStream->priv == NULL) {
2564 //New stream, construct channel
2565 switch (newStream->stream_type) {
2566 case CAMERA3_STREAM_INPUT:
2567 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2568 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2569 break;
2570 case CAMERA3_STREAM_BIDIRECTIONAL:
2571 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2572 GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 case CAMERA3_STREAM_OUTPUT:
2575 /* For the video encoding stream, set the read/write rarely
2576 * flags so that the buffers may be allocated un-cached */
2577 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2578 newStream->usage |=
2579 (GRALLOC_USAGE_SW_READ_RARELY |
2580 GRALLOC_USAGE_SW_WRITE_RARELY |
2581 GRALLOC_USAGE_HW_CAMERA_WRITE);
2582 else if (IS_USAGE_ZSL(newStream->usage))
2583 {
2584 LOGD("ZSL usage flag skipping");
2585 }
2586 else if (newStream == zslStream
2587 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2589 } else
2590 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2591 break;
2592 default:
2593 LOGE("Invalid stream_type %d", newStream->stream_type);
2594 break;
2595 }
2596
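// UBWC stays enabled for the preview stream by default; it is disabled further
// below when preview and video share the same dimensions and video UBWC is off
// (so the CPP can duplicate the video buffer for preview), or when goog_zoom is
// linked to the stream.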
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002597 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002598 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2599 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2600 QCamera3ProcessingChannel *channel = NULL;
2601 switch (newStream->format) {
2602 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2603 if ((newStream->usage &
2604 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2605 (streamList->operation_mode ==
2606 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2607 ) {
2608 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2609 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002610 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002611 this,
2612 newStream,
2613 (cam_stream_type_t)
2614 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2615 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2616 mMetadataChannel,
2617 0); //heap buffers are not required for HFR video channel
2618 if (channel == NULL) {
2619 LOGE("allocation of channel failed");
2620 pthread_mutex_unlock(&mMutex);
2621 return -ENOMEM;
2622 }
2623 //channel->getNumBuffers() will return 0 here so use
2624 //MAX_INFLIGHT_HFR_REQUESTS
2625 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2626 newStream->priv = channel;
2627 LOGI("num video buffers in HFR mode: %d",
2628 MAX_INFLIGHT_HFR_REQUESTS);
2629 } else {
2630 /* Copy stream contents in the HFR preview-only case to create a
2631 * dummy batch channel so that sensor streaming is in
2632 * HFR mode */
2633 if (!m_bIsVideo && (streamList->operation_mode ==
2634 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2635 mDummyBatchStream = *newStream;
2636 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002637 int bufferCount = MAX_INFLIGHT_REQUESTS;
2638 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2639 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002640 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2641 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2642 bufferCount = m_bIs4KVideo ?
2643 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2644 }
2645
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002647 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2648 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002649 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 this,
2651 newStream,
2652 (cam_stream_type_t)
2653 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2654 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2655 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002656 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002657 if (channel == NULL) {
2658 LOGE("allocation of channel failed");
2659 pthread_mutex_unlock(&mMutex);
2660 return -ENOMEM;
2661 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002662 /* disable UBWC for preview, though supported,
2663 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002665 (previewSize.width == (int32_t)videoWidth)&&
2666 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002667 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002668 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002669 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002670 /* When goog_zoom is linked to the preview or video stream,
2671 * disable UBWC for the linked stream */
2672 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2673 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2674 channel->setUBWCEnabled(false);
2675 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 newStream->max_buffers = channel->getNumBuffers();
2677 newStream->priv = channel;
2678 }
2679 break;
2680 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2681 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2682 mChannelHandle,
2683 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002684 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002685 this,
2686 newStream,
2687 (cam_stream_type_t)
2688 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2689 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2690 mMetadataChannel);
2691 if (channel == NULL) {
2692 LOGE("allocation of YUV channel failed");
2693 pthread_mutex_unlock(&mMutex);
2694 return -ENOMEM;
2695 }
2696 newStream->max_buffers = channel->getNumBuffers();
2697 newStream->priv = channel;
2698 break;
2699 }
2700 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2701 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002702 case HAL_PIXEL_FORMAT_RAW10: {
2703 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2704 (HAL_DATASPACE_DEPTH != newStream->data_space))
2705 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 mRawChannel = new QCamera3RawChannel(
2707 mCameraHandle->camera_handle, mChannelHandle,
2708 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002709 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002710 this, newStream,
2711 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002712 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 if (mRawChannel == NULL) {
2714 LOGE("allocation of raw channel failed");
2715 pthread_mutex_unlock(&mMutex);
2716 return -ENOMEM;
2717 }
2718 newStream->max_buffers = mRawChannel->getNumBuffers();
2719 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2720 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002721 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002722 case HAL_PIXEL_FORMAT_BLOB:
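// A BLOB stream with the DEPTH dataspace is served by a dedicated depth
// channel; every other BLOB stream goes through the JPEG picture channel
// created below.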
Emilian Peev7650c122017-01-19 08:24:33 -08002723 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2724 mDepthChannel = new QCamera3DepthChannel(
2725 mCameraHandle->camera_handle, mChannelHandle,
2726 mCameraHandle->ops, NULL, NULL, &padding_info,
2727 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2728 mMetadataChannel);
2729 if (NULL == mDepthChannel) {
2730 LOGE("Allocation of depth channel failed");
2731 pthread_mutex_unlock(&mMutex);
2732 return NO_MEMORY;
2733 }
2734 newStream->priv = mDepthChannel;
2735 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2736 } else {
2737 // Max live snapshot inflight buffer is 1. This is to mitigate
2738 // frame drop issues for video snapshot. The more buffers being
2739 // allocated, the more frame drops there are.
2740 mPictureChannel = new QCamera3PicChannel(
2741 mCameraHandle->camera_handle, mChannelHandle,
2742 mCameraHandle->ops, captureResultCb,
2743 setBufferErrorStatus, &padding_info, this, newStream,
2744 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2745 m_bIs4KVideo, isZsl, mMetadataChannel,
2746 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2747 if (mPictureChannel == NULL) {
2748 LOGE("allocation of channel failed");
2749 pthread_mutex_unlock(&mMutex);
2750 return -ENOMEM;
2751 }
2752 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2753 newStream->max_buffers = mPictureChannel->getNumBuffers();
2754 mPictureChannel->overrideYuvSize(
2755 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2756 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002758 break;
2759
2760 default:
2761 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002762 pthread_mutex_unlock(&mMutex);
2763 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 }
2765 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2766 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2767 } else {
2768 LOGE("Error, Unknown stream type");
2769 pthread_mutex_unlock(&mMutex);
2770 return -EINVAL;
2771 }
2772
2773 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002774 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002775 // Here we only care whether it's EIS3 or not
2776 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2777 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2778 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2779 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002780 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002781 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002782 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002783 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2784 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2785 }
2786 }
2787
2788 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2789 it != mStreamInfo.end(); it++) {
2790 if ((*it)->stream == newStream) {
2791 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2792 break;
2793 }
2794 }
2795 } else {
2796 // Channel already exists for this stream
2797 // Do nothing for now
2798 }
2799 padding_info = gCamCapability[mCameraId]->padding_info;
2800
Emilian Peev7650c122017-01-19 08:24:33 -08002801 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 * since there is no real stream associated with them
2803 */
Emilian Peev7650c122017-01-19 08:24:33 -08002804 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002805 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2806 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002807 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002809 }
2810
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002811 // Let buffer dispatcher know the configured streams.
2812 mOutputBufferDispatcher.configureStreams(streamList);
2813
Thierry Strudel2896d122017-02-23 19:18:03 -08002814 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2815 onlyRaw = false;
2816 }
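// onlyRaw stays true only in the vendor RAW-only operation mode; it gates
// creation of the analysis and callback support channels below.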
2817
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002819 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002820 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002821 cam_analysis_info_t analysisInfo;
2822 int32_t ret = NO_ERROR;
2823 ret = mCommon.getAnalysisInfo(
2824 FALSE,
2825 analysisFeatureMask,
2826 &analysisInfo);
2827 if (ret == NO_ERROR) {
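// Y-only analysis formats (e.g. mono sensors) use the Y filter arrangement
// so that PAAF support is evaluated correctly; otherwise the sensor's native
// color arrangement is used.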
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002828 cam_color_filter_arrangement_t analysis_color_arrangement =
2829 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2830 CAM_FILTER_ARRANGEMENT_Y :
2831 gCamCapability[mCameraId]->color_arrangement);
2832 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2833 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002834 cam_dimension_t analysisDim;
2835 analysisDim = mCommon.getMatchingDimension(previewSize,
2836 analysisInfo.analysis_recommended_res);
2837
2838 mAnalysisChannel = new QCamera3SupportChannel(
2839 mCameraHandle->camera_handle,
2840 mChannelHandle,
2841 mCameraHandle->ops,
2842 &analysisInfo.analysis_padding_info,
2843 analysisFeatureMask,
2844 CAM_STREAM_TYPE_ANALYSIS,
2845 &analysisDim,
2846 (analysisInfo.analysis_format
2847 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2848 : CAM_FORMAT_YUV_420_NV21),
2849 analysisInfo.hw_analysis_supported,
2850 gCamCapability[mCameraId]->color_arrangement,
2851 this,
2852 0); // force buffer count to 0
2853 } else {
2854 LOGW("getAnalysisInfo failed, ret = %d", ret);
2855 }
2856 if (!mAnalysisChannel) {
2857 LOGW("Analysis channel cannot be created");
2858 }
2859 }
2860
Thierry Strudel3d639192016-09-09 11:52:26 -07002861 //RAW DUMP channel
2862 if (mEnableRawDump && isRawStreamRequested == false){
2863 cam_dimension_t rawDumpSize;
2864 rawDumpSize = getMaxRawSize(mCameraId);
2865 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2866 setPAAFSupport(rawDumpFeatureMask,
2867 CAM_STREAM_TYPE_RAW,
2868 gCamCapability[mCameraId]->color_arrangement);
2869 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2870 mChannelHandle,
2871 mCameraHandle->ops,
2872 rawDumpSize,
2873 &padding_info,
2874 this, rawDumpFeatureMask);
2875 if (!mRawDumpChannel) {
2876 LOGE("Raw Dump channel cannot be created");
2877 pthread_mutex_unlock(&mMutex);
2878 return -ENOMEM;
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 if (mAnalysisChannel) {
2883 cam_analysis_info_t analysisInfo;
2884 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2885 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2886 CAM_STREAM_TYPE_ANALYSIS;
2887 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2888 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002889 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2891 &analysisInfo);
2892 if (rc != NO_ERROR) {
2893 LOGE("getAnalysisInfo failed, ret = %d", rc);
2894 pthread_mutex_unlock(&mMutex);
2895 return rc;
2896 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002897 cam_color_filter_arrangement_t analysis_color_arrangement =
2898 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2899 CAM_FILTER_ARRANGEMENT_Y :
2900 gCamCapability[mCameraId]->color_arrangement);
2901 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2902 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2903 analysis_color_arrangement);
2904
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002906 mCommon.getMatchingDimension(previewSize,
2907 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002908 mStreamConfigInfo.num_streams++;
2909 }
2910
Thierry Strudel2896d122017-02-23 19:18:03 -08002911 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002912 cam_analysis_info_t supportInfo;
2913 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2914 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2915 setPAAFSupport(callbackFeatureMask,
2916 CAM_STREAM_TYPE_CALLBACK,
2917 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002918 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002919 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002920 if (ret != NO_ERROR) {
2921 /* Ignore the error for Mono camera
2922 * because the PAAF bit mask is only set
2923 * for CAM_STREAM_TYPE_ANALYSIS stream type
2924 */
2925 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2926 LOGW("getAnalysisInfo failed, ret = %d", ret);
2927 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002928 }
2929 mSupportChannel = new QCamera3SupportChannel(
2930 mCameraHandle->camera_handle,
2931 mChannelHandle,
2932 mCameraHandle->ops,
2933 &gCamCapability[mCameraId]->padding_info,
2934 callbackFeatureMask,
2935 CAM_STREAM_TYPE_CALLBACK,
2936 &QCamera3SupportChannel::kDim,
2937 CAM_FORMAT_YUV_420_NV21,
2938 supportInfo.hw_analysis_supported,
2939 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002940 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002941 if (!mSupportChannel) {
2942 LOGE("dummy channel cannot be created");
2943 pthread_mutex_unlock(&mMutex);
2944 return -ENOMEM;
2945 }
2946 }
2947
2948 if (mSupportChannel) {
2949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2950 QCamera3SupportChannel::kDim;
2951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2952 CAM_STREAM_TYPE_CALLBACK;
2953 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2954 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2955 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2956 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2957 gCamCapability[mCameraId]->color_arrangement);
2958 mStreamConfigInfo.num_streams++;
2959 }
2960
2961 if (mRawDumpChannel) {
2962 cam_dimension_t rawSize;
2963 rawSize = getMaxRawSize(mCameraId);
2964 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2965 rawSize;
2966 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2967 CAM_STREAM_TYPE_RAW;
2968 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2969 CAM_QCOM_FEATURE_NONE;
2970 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2971 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2972 gCamCapability[mCameraId]->color_arrangement);
2973 mStreamConfigInfo.num_streams++;
2974 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002975
2976 if (mHdrPlusRawSrcChannel) {
2977 cam_dimension_t rawSize;
2978 rawSize = getMaxRawSize(mCameraId);
2979 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2980 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2981 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2982 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2983 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2984 gCamCapability[mCameraId]->color_arrangement);
2985 mStreamConfigInfo.num_streams++;
2986 }
2987
Thierry Strudel3d639192016-09-09 11:52:26 -07002988 /* In HFR mode, if a video stream is not added, create a dummy channel so that
2989 * the ISP can operate in batch mode even for the preview-only case. This channel is
2990 * never 'start'ed (no stream-on), it is only 'initialized' */
2991 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2992 !m_bIsVideo) {
2993 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2994 setPAAFSupport(dummyFeatureMask,
2995 CAM_STREAM_TYPE_VIDEO,
2996 gCamCapability[mCameraId]->color_arrangement);
2997 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2998 mChannelHandle,
2999 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003000 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003001 this,
3002 &mDummyBatchStream,
3003 CAM_STREAM_TYPE_VIDEO,
3004 dummyFeatureMask,
3005 mMetadataChannel);
3006 if (NULL == mDummyBatchChannel) {
3007 LOGE("creation of mDummyBatchChannel failed."
3008 "Preview will use non-hfr sensor mode ");
3009 }
3010 }
3011 if (mDummyBatchChannel) {
3012 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3013 mDummyBatchStream.width;
3014 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3015 mDummyBatchStream.height;
3016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3017 CAM_STREAM_TYPE_VIDEO;
3018 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3019 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3020 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3021 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3022 gCamCapability[mCameraId]->color_arrangement);
3023 mStreamConfigInfo.num_streams++;
3024 }
3025
3026 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3027 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003028 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003029 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003030
3031 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3032 for (pendingRequestIterator i = mPendingRequestsList.begin();
3033 i != mPendingRequestsList.end();) {
3034 i = erasePendingRequest(i);
3035 }
3036 mPendingFrameDropList.clear();
3037 // Initialize/Reset the pending buffers list
3038 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3039 req.mPendingBufferList.clear();
3040 }
3041 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003042 mExpectedInflightDuration = 0;
3043 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003044
Thierry Strudel3d639192016-09-09 11:52:26 -07003045 mCurJpegMeta.clear();
3046 //Get min frame duration for this streams configuration
3047 deriveMinFrameDuration();
3048
Chien-Yu Chenee335912017-02-09 17:53:20 -08003049 mFirstPreviewIntentSeen = false;
3050
3051 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003052 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003053 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3054 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003055 disableHdrPlusModeLocked();
3056 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003057
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 // Update state
3059 mState = CONFIGURED;
3060
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003061 mFirstMetadataCallback = true;
3062
Thierry Strudel3d639192016-09-09 11:52:26 -07003063 pthread_mutex_unlock(&mMutex);
3064
3065 return rc;
3066}
3067
3068/*===========================================================================
3069 * FUNCTION : validateCaptureRequest
3070 *
3071 * DESCRIPTION: validate a capture request from camera service
3072 *
3073 * PARAMETERS :
3074 * @request : request from framework to process
3075 *
3076 * RETURN :
3077 *
3078 *==========================================================================*/
3079int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003080 camera3_capture_request_t *request,
3081 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003082{
3083 ssize_t idx = 0;
3084 const camera3_stream_buffer_t *b;
3085 CameraMetadata meta;
3086
3087 /* Sanity check the request */
3088 if (request == NULL) {
3089 LOGE("NULL capture request");
3090 return BAD_VALUE;
3091 }
3092
3093 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3094 /*settings cannot be null for the first request*/
3095 return BAD_VALUE;
3096 }
3097
3098 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003099 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3100 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003101 LOGE("Request %d: No output buffers provided!",
3102 frameNumber);
3103 return BAD_VALUE;
3104 }
3105 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3106 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3107 request->num_output_buffers, MAX_NUM_STREAMS);
3108 return BAD_VALUE;
3109 }
3110 if (request->input_buffer != NULL) {
3111 b = request->input_buffer;
3112 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3113 LOGE("Request %d: Buffer %ld: Status not OK!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 if (b->release_fence != -1) {
3118 LOGE("Request %d: Buffer %ld: Has a release fence!",
3119 frameNumber, (long)idx);
3120 return BAD_VALUE;
3121 }
3122 if (b->buffer == NULL) {
3123 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3124 frameNumber, (long)idx);
3125 return BAD_VALUE;
3126 }
3127 }
3128
3129 // Validate all buffers
3130 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003131 if (b == NULL) {
3132 return BAD_VALUE;
3133 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003134 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003135 QCamera3ProcessingChannel *channel =
3136 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3137 if (channel == NULL) {
3138 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3143 LOGE("Request %d: Buffer %ld: Status not OK!",
3144 frameNumber, (long)idx);
3145 return BAD_VALUE;
3146 }
3147 if (b->release_fence != -1) {
3148 LOGE("Request %d: Buffer %ld: Has a release fence!",
3149 frameNumber, (long)idx);
3150 return BAD_VALUE;
3151 }
3152 if (b->buffer == NULL) {
3153 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3154 frameNumber, (long)idx);
3155 return BAD_VALUE;
3156 }
3157 if (*(b->buffer) == NULL) {
3158 LOGE("Request %d: Buffer %ld: NULL private handle!",
3159 frameNumber, (long)idx);
3160 return BAD_VALUE;
3161 }
3162 idx++;
3163 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003164 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003165 return NO_ERROR;
3166}
3167
3168/*===========================================================================
3169 * FUNCTION : deriveMinFrameDuration
3170 *
3171 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3172 * on currently configured streams.
3173 *
3174 * PARAMETERS : NONE
3175 *
3176 * RETURN : NONE
3177 *
3178 *==========================================================================*/
3179void QCamera3HardwareInterface::deriveMinFrameDuration()
3180{
3181 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003182 bool hasRaw = false;
3183
3184 mMinRawFrameDuration = 0;
3185 mMinJpegFrameDuration = 0;
3186 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003187
3188 maxJpegDim = 0;
3189 maxProcessedDim = 0;
3190 maxRawDim = 0;
3191
3192 // Figure out maximum jpeg, processed, and raw dimensions
3193 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3194 it != mStreamInfo.end(); it++) {
3195
3196 // Input stream doesn't have valid stream_type
3197 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3198 continue;
3199
3200 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3201 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3202 if (dimension > maxJpegDim)
3203 maxJpegDim = dimension;
3204 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3205 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3206 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003207 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003208 if (dimension > maxRawDim)
3209 maxRawDim = dimension;
3210 } else {
3211 if (dimension > maxProcessedDim)
3212 maxProcessedDim = dimension;
3213 }
3214 }
3215
3216 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3217 MAX_SIZES_CNT);
3218
3219 //Assume all jpeg dimensions are in processed dimensions.
3220 if (maxJpegDim > maxProcessedDim)
3221 maxProcessedDim = maxJpegDim;
3222 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003223 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003224 maxRawDim = INT32_MAX;
3225
3226 for (size_t i = 0; i < count; i++) {
3227 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3228 gCamCapability[mCameraId]->raw_dim[i].height;
3229 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3230 maxRawDim = dimension;
3231 }
3232 }
3233
3234 //Find minimum durations for processed, jpeg, and raw
3235 for (size_t i = 0; i < count; i++) {
3236 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3237 gCamCapability[mCameraId]->raw_dim[i].height) {
3238 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3239 break;
3240 }
3241 }
3242 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3243 for (size_t i = 0; i < count; i++) {
3244 if (maxProcessedDim ==
3245 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3246 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3247 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3248 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3249 break;
3250 }
3251 }
3252}
3253
3254/*===========================================================================
3255 * FUNCTION : getMinFrameDuration
3256 *
3257 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3258 * of the currently configured streams and the current request configuration.
3259 *
3260 * PARAMETERS : @request: request sent by the frameworks
3261 *
3262 * RETURN : min frame duration for a particular request
3263 *
3264 *==========================================================================*/
3265int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3266{
3267 bool hasJpegStream = false;
3268 bool hasRawStream = false;
3269 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3270 const camera3_stream_t *stream = request->output_buffers[i].stream;
3271 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3272 hasJpegStream = true;
3273 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3274 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3275 stream->format == HAL_PIXEL_FORMAT_RAW16)
3276 hasRawStream = true;
3277 }
3278
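    // A request can run no faster than its slowest stream type: the raw and
    // processed minimum durations always apply, while the jpeg minimum duration
    // applies only when the request includes a BLOB stream.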
3279 if (!hasJpegStream)
3280 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3281 else
3282 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3283}
3284
3285/*===========================================================================
3286 * FUNCTION : handleBuffersDuringFlushLock
3287 *
3288 * DESCRIPTION: Account for buffers returned from back-end during flush
3289 * This function is executed while mMutex is held by the caller.
3290 *
3291 * PARAMETERS :
3292 * @buffer: image buffer for the callback
3293 *
3294 * RETURN :
3295 *==========================================================================*/
3296void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3297{
3298 bool buffer_found = false;
3299 for (List<PendingBuffersInRequest>::iterator req =
3300 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3301 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3302 for (List<PendingBufferInfo>::iterator i =
3303 req->mPendingBufferList.begin();
3304 i != req->mPendingBufferList.end(); i++) {
3305 if (i->buffer == buffer->buffer) {
3306 mPendingBuffersMap.numPendingBufsAtFlush--;
3307 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3308 buffer->buffer, req->frame_number,
3309 mPendingBuffersMap.numPendingBufsAtFlush);
3310 buffer_found = true;
3311 break;
3312 }
3313 }
3314 if (buffer_found) {
3315 break;
3316 }
3317 }
3318 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3319 //signal the flush()
3320 LOGD("All buffers returned to HAL. Continue flush");
3321 pthread_cond_signal(&mBuffersCond);
3322 }
3323}
3324
Thierry Strudel3d639192016-09-09 11:52:26 -07003325/*===========================================================================
3326 * FUNCTION : handleBatchMetadata
3327 *
3328 * DESCRIPTION: Handles metadata buffer callback in batch mode
3329 *
3330 * PARAMETERS : @metadata_buf: metadata buffer
3331 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3332 * the meta buf in this method
3333 *
3334 * RETURN :
3335 *
3336 *==========================================================================*/
3337void QCamera3HardwareInterface::handleBatchMetadata(
3338 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3339{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003340 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003341
3342 if (NULL == metadata_buf) {
3343 LOGE("metadata_buf is NULL");
3344 return;
3345 }
3346 /* In batch mode, the metadata will contain the frame number and timestamp of
3347 * the last frame in the batch. Eg: a batch containing buffers from request
3348 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3349 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3350 * multiple process_capture_results */
3351 metadata_buffer_t *metadata =
3352 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3353 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3354 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3355 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3356 uint32_t frame_number = 0, urgent_frame_number = 0;
3357 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3358 bool invalid_metadata = false;
3359 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3360 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003361 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003362
3363 int32_t *p_frame_number_valid =
3364 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3365 uint32_t *p_frame_number =
3366 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3367 int64_t *p_capture_time =
3368 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3369 int32_t *p_urgent_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_urgent_frame_number =
3372 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3373
3374 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3375 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3376 (NULL == p_urgent_frame_number)) {
3377 LOGE("Invalid metadata");
3378 invalid_metadata = true;
3379 } else {
3380 frame_number_valid = *p_frame_number_valid;
3381 last_frame_number = *p_frame_number;
3382 last_frame_capture_time = *p_capture_time;
3383 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3384 last_urgent_frame_number = *p_urgent_frame_number;
3385 }
3386
3387 /* In batchmode, when no video buffers are requested, set_parms are sent
3388 * for every capture_request. The difference between consecutive urgent
3389 * frame numbers and frame numbers should be used to interpolate the
3390 * corresponding frame numbers and time stamps */
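    /* Example: if first_frame_number is 5 and last_frame_number is 8,
     * frameNumDiff evaluates to 4 and the loop below emits interpolated
     * results for frames 5, 6, 7 and 8. */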
3391 pthread_mutex_lock(&mMutex);
3392 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003393 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3394 if(idx < 0) {
3395 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3396 last_urgent_frame_number);
3397 mState = ERROR;
3398 pthread_mutex_unlock(&mMutex);
3399 return;
3400 }
3401 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3403 first_urgent_frame_number;
3404
3405 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3406 urgent_frame_number_valid,
3407 first_urgent_frame_number, last_urgent_frame_number);
3408 }
3409
3410 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003411 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3412 if(idx < 0) {
3413 LOGE("Invalid frame number received: %d. Irrecoverable error",
3414 last_frame_number);
3415 mState = ERROR;
3416 pthread_mutex_unlock(&mMutex);
3417 return;
3418 }
3419 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003420 frameNumDiff = last_frame_number + 1 -
3421 first_frame_number;
3422 mPendingBatchMap.removeItem(last_frame_number);
3423
3424 LOGD("frm: valid: %d frm_num: %d - %d",
3425 frame_number_valid,
3426 first_frame_number, last_frame_number);
3427
3428 }
3429 pthread_mutex_unlock(&mMutex);
3430
3431 if (urgent_frame_number_valid || frame_number_valid) {
3432 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3433 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3434 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3435 urgentFrameNumDiff, last_urgent_frame_number);
3436 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3437 LOGE("frameNumDiff: %d frameNum: %d",
3438 frameNumDiff, last_frame_number);
3439 }
3440
3441 for (size_t i = 0; i < loopCount; i++) {
3442 /* handleMetadataWithLock is called even for invalid_metadata for
3443 * pipeline depth calculation */
3444 if (!invalid_metadata) {
3445 /* Infer frame number. Batch metadata contains frame number of the
3446 * last frame */
3447 if (urgent_frame_number_valid) {
3448 if (i < urgentFrameNumDiff) {
3449 urgent_frame_number =
3450 first_urgent_frame_number + i;
3451 LOGD("inferred urgent frame_number: %d",
3452 urgent_frame_number);
3453 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3454 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3455 } else {
3456 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3457 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3458 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3459 }
3460 }
3461
3462 /* Infer frame number. Batch metadata contains frame number of the
3463 * last frame */
3464 if (frame_number_valid) {
3465 if (i < frameNumDiff) {
3466 frame_number = first_frame_number + i;
3467 LOGD("inferred frame_number: %d", frame_number);
3468 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3469 CAM_INTF_META_FRAME_NUMBER, frame_number);
3470 } else {
3471 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3472 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3473 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3474 }
3475 }
3476
3477 if (last_frame_capture_time) {
3478 //Infer timestamp
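                //Frames in the batch are assumed to be evenly spaced at the HFR video
                //frame rate, e.g. with loopCount = 4 and mHFRVideoFps = 120 consecutive
                //frames are ~8.33ms apart, so the first frame's timestamp is ~25ms
                //before the last frame's.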
3479 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003480 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003482 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003483 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3484 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3485 LOGD("batch capture_time: %lld, capture_time: %lld",
3486 last_frame_capture_time, capture_time);
3487 }
3488 }
3489 pthread_mutex_lock(&mMutex);
3490 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003491 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003492 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3493 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 pthread_mutex_unlock(&mMutex);
3496 }
3497
3498 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003499 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 mMetadataChannel->bufDone(metadata_buf);
3501 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003502 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003503 }
3504}
3505
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003506void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3507 camera3_error_msg_code_t errorCode)
3508{
3509 camera3_notify_msg_t notify_msg;
3510 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3511 notify_msg.type = CAMERA3_MSG_ERROR;
3512 notify_msg.message.error.error_code = errorCode;
3513 notify_msg.message.error.error_stream = NULL;
3514 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003515 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003516
3517 return;
3518}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003519
3520/*===========================================================================
3521 * FUNCTION : sendPartialMetadataWithLock
3522 *
3523 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3524 *
3525 * PARAMETERS : @metadata: metadata buffer
3526 * @requestIter: The iterator for the pending capture request for
3527 * which the partial result is being sent
3528 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3529 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003530 * @isJumpstartMetadata: Whether this is a partial metadata for
3531 * jumpstart, i.e. even though it doesn't map to a valid partial
3532 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533 *
3534 * RETURN :
3535 *
3536 *==========================================================================*/
3537
3538void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3539 metadata_buffer_t *metadata,
3540 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003541 bool lastUrgentMetadataInBatch,
3542 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003543{
3544 camera3_capture_result_t result;
3545 memset(&result, 0, sizeof(camera3_capture_result_t));
3546
3547 requestIter->partial_result_cnt++;
3548
3549 // Extract 3A metadata
3550 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003551 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3552 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003553 // Populate metadata result
3554 result.frame_number = requestIter->frame_number;
3555 result.num_output_buffers = 0;
3556 result.output_buffers = NULL;
3557 result.partial_result = requestIter->partial_result_cnt;
3558
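    // If an HDR+ client is active, forward this partial metadata to it as well;
    // the boolean argument marks whether all partial results have now been sent.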
3559 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003560 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003561 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3562 // Notify HDR+ client about the partial metadata.
3563 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3564 result.partial_result == PARTIAL_RESULT_COUNT);
3565 }
3566 }
3567
3568 orchestrateResult(&result);
3569 LOGD("urgent frame_number = %u", result.frame_number);
3570 free_camera_metadata((camera_metadata_t *)result.result);
3571}
3572
Thierry Strudel3d639192016-09-09 11:52:26 -07003573/*===========================================================================
3574 * FUNCTION : handleMetadataWithLock
3575 *
3576 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3577 *
3578 * PARAMETERS : @metadata_buf: metadata buffer
3579 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3580 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003581 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3582 * last urgent metadata in a batch. Always true for non-batch mode
3583 * @lastMetadataInBatch: Boolean to indicate whether this is the
3584 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003585 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3586 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 *
3588 * RETURN :
3589 *
3590 *==========================================================================*/
3591void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003592 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003593 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3594 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003595{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003596 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003597 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3598 //during flush do not send metadata from this thread
3599 LOGD("not sending metadata during flush or when mState is error");
3600 if (free_and_bufdone_meta_buf) {
3601 mMetadataChannel->bufDone(metadata_buf);
3602 free(metadata_buf);
3603 }
3604 return;
3605 }
3606
3607 //not in flush
3608 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3609 int32_t frame_number_valid, urgent_frame_number_valid;
3610 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003611 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003612 nsecs_t currentSysTime;
3613
3614 int32_t *p_frame_number_valid =
3615 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3616 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3617 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003618 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003619 int32_t *p_urgent_frame_number_valid =
3620 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3621 uint32_t *p_urgent_frame_number =
3622 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3623 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3624 metadata) {
3625 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3626 *p_frame_number_valid, *p_frame_number);
3627 }
3628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003629 camera_metadata_t *resultMetadata = nullptr;
3630
Thierry Strudel3d639192016-09-09 11:52:26 -07003631 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3632 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3633 LOGE("Invalid metadata");
3634 if (free_and_bufdone_meta_buf) {
3635 mMetadataChannel->bufDone(metadata_buf);
3636 free(metadata_buf);
3637 }
3638 goto done_metadata;
3639 }
3640 frame_number_valid = *p_frame_number_valid;
3641 frame_number = *p_frame_number;
3642 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003643 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003644 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3645 urgent_frame_number = *p_urgent_frame_number;
3646 currentSysTime = systemTime(CLOCK_MONOTONIC);
3647
Jason Lee603176d2017-05-31 11:43:27 -07003648 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3649 const int tries = 3;
3650 nsecs_t bestGap, measured;
3651 for (int i = 0; i < tries; ++i) {
3652 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3653 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3654 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3655 const nsecs_t gap = tmono2 - tmono;
3656 if (i == 0 || gap < bestGap) {
3657 bestGap = gap;
3658 measured = tbase - ((tmono + tmono2) >> 1);
3659 }
3660 }
3661 capture_time -= measured;
3662 }
3663
Thierry Strudel3d639192016-09-09 11:52:26 -07003664 // Detect if buffers from any requests are overdue
3665 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 int64_t timeout;
3667 {
3668 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3669 // If there is a pending HDR+ request, the following requests may be blocked until the
3670 // HDR+ request is done. So allow a longer timeout.
3671 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3672 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003673 if (timeout < mExpectedInflightDuration) {
3674 timeout = mExpectedInflightDuration;
3675 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676 }
3677
3678 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003679 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003680 assert(missed.stream->priv);
3681 if (missed.stream->priv) {
3682 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3683 assert(ch->mStreams[0]);
3684 if (ch->mStreams[0]) {
3685 LOGE("Cancel missing frame = %d, buffer = %p,"
3686 "stream type = %d, stream format = %d",
3687 req.frame_number, missed.buffer,
3688 ch->mStreams[0]->getMyType(), missed.stream->format);
3689 ch->timeoutFrame(req.frame_number);
3690 }
3691 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 }
3693 }
3694 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003695 //For the very first metadata callback, regardless of whether it contains a valid
3696 //frame number, send the partial metadata for the jumpstarting requests.
3697 //Note that this has to be done even if the metadata doesn't contain a valid
3698 //urgent frame number, because in the case where only 1 request is ever submitted
3699 //to the HAL, there won't be a subsequent valid urgent frame number.
3700 if (mFirstMetadataCallback) {
3701 for (pendingRequestIterator i =
3702 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3703 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003704 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3705 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003706 }
3707 }
3708 mFirstMetadataCallback = false;
3709 }
3710
Thierry Strudel3d639192016-09-09 11:52:26 -07003711 //Partial result on process_capture_result for timestamp
3712 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003713 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003714
3715 //Received an urgent frame number; handle it
3716 //using partial results
3717 for (pendingRequestIterator i =
3718 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3719 LOGD("Iterator Frame = %d urgent frame = %d",
3720 i->frame_number, urgent_frame_number);
3721
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003722 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003723 (i->partial_result_cnt == 0)) {
3724 LOGE("Error: HAL missed urgent metadata for frame number %d",
3725 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003726 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003727 }
3728
3729 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003730 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003731 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3732 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003733 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3734 // Instant AEC settled for this frame.
3735 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3736 mInstantAECSettledFrameNumber = urgent_frame_number;
3737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 break;
3739 }
3740 }
3741 }
3742
3743 if (!frame_number_valid) {
3744 LOGD("Not a valid normal frame number, used as SOF only");
3745 if (free_and_bufdone_meta_buf) {
3746 mMetadataChannel->bufDone(metadata_buf);
3747 free(metadata_buf);
3748 }
3749 goto done_metadata;
3750 }
3751 LOGH("valid frame_number = %u, capture_time = %lld",
3752 frame_number, capture_time);
3753
Emilian Peev4e0fe952017-06-30 12:40:09 -07003754 handleDepthDataLocked(metadata->depth_data, frame_number,
3755 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003756
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 // Check whether any stream buffer corresponding to this frame is dropped or not.
3758 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3759 // OR, if instant AEC is enabled, drop frames until AEC is settled.
3760 for (auto & pendingRequest : mPendingRequestsList) {
3761 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3762 mInstantAECSettledFrameNumber)) {
3763 camera3_notify_msg_t notify_msg = {};
3764 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003765 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 QCamera3ProcessingChannel *channel =
3767 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003768 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 if (p_cam_frame_drop) {
3770 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003771 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003772 // Got the stream ID for drop frame.
3773 dropFrame = true;
3774 break;
3775 }
3776 }
3777 } else {
3778 // This is the instant AEC case.
3779 // For instant AEC, drop the stream until AEC is settled.
3780 dropFrame = true;
3781 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (dropFrame) {
3784 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3785 if (p_cam_frame_drop) {
3786 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003787 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003789 } else {
3790 // For instant AEC, inform frame drop and frame number
3791 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3792 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003793 pendingRequest.frame_number, streamID,
3794 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003795 }
3796 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003800 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003801 if (p_cam_frame_drop) {
3802 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003803 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003805 } else {
3806 // For instant AEC, inform frame drop and frame number
3807 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3808 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 pendingRequest.frame_number, streamID,
3810 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003811 }
3812 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003814 PendingFrameDrop.stream_ID = streamID;
3815 // Add the Frame drop info to mPendingFrameDropList
3816 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 }
3819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003821
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003822 for (auto & pendingRequest : mPendingRequestsList) {
3823 // Find the pending request with the frame number.
3824 if (pendingRequest.frame_number == frame_number) {
3825 // Update the sensor timestamp.
3826 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003827
Thierry Strudel3d639192016-09-09 11:52:26 -07003828
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003829            /* Set the timestamp in display metadata so that clients aware of
3830               private_handle, such as VT, can use these unmodified timestamps.
3831               The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003832 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003833
Thierry Strudel3d639192016-09-09 11:52:26 -07003834 // Find channel requiring metadata, meaning internal offline postprocess
3835 // is needed.
3836            //TODO: for now, we don't support two streams requiring metadata at the same time
3837            // (because we are not making copies, and the metadata buffer is not reference counted).
3838 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3840 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003841 if (iter->need_metadata) {
3842 internalPproc = true;
3843 QCamera3ProcessingChannel *channel =
3844 (QCamera3ProcessingChannel *)iter->stream->priv;
3845 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003846 if(p_is_metabuf_queued != NULL) {
3847 *p_is_metabuf_queued = true;
3848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003849 break;
3850 }
3851 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003852 for (auto itr = pendingRequest.internalRequestList.begin();
3853 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003854 if (itr->need_metadata) {
3855 internalPproc = true;
3856 QCamera3ProcessingChannel *channel =
3857 (QCamera3ProcessingChannel *)itr->stream->priv;
3858 channel->queueReprocMetadata(metadata_buf);
3859 break;
3860 }
3861 }
3862
Thierry Strudel54dc9782017-02-15 12:12:10 -08003863 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003864
3865 bool *enableZsl = nullptr;
3866 if (gExposeEnableZslKey) {
3867 enableZsl = &pendingRequest.enableZsl;
3868 }
3869
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003870 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003871 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003872 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003873
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003874 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003876 if (pendingRequest.blob_request) {
3877 //Dump tuning metadata if enabled and available
3878 char prop[PROPERTY_VALUE_MAX];
3879 memset(prop, 0, sizeof(prop));
3880 property_get("persist.camera.dumpmetadata", prop, "0");
3881 int32_t enabled = atoi(prop);
3882 if (enabled && metadata->is_tuning_params_valid) {
3883 dumpMetadataToFile(metadata->tuning_params,
3884 mMetaFrameCount,
3885 enabled,
3886 "Snapshot",
3887 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003888 }
3889 }
3890
3891 if (!internalPproc) {
3892 LOGD("couldn't find need_metadata for this metadata");
3893 // Return metadata buffer
3894 if (free_and_bufdone_meta_buf) {
3895 mMetadataChannel->bufDone(metadata_buf);
3896 free(metadata_buf);
3897 }
3898 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003899
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003900 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003901 }
3902 }
3903
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003904 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3905
3906 // Try to send out capture result metadata.
3907 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003908 return;
3909
Thierry Strudel3d639192016-09-09 11:52:26 -07003910done_metadata:
3911 for (pendingRequestIterator i = mPendingRequestsList.begin();
3912 i != mPendingRequestsList.end() ;i++) {
3913 i->pipeline_depth++;
3914 }
3915 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3916 unblockRequestIfNecessary();
3917}
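// The drop-frame path above reduces to one CAMERA3_MSG_ERROR_BUFFER notification per
// dropped buffer. A minimal sketch of how such a message is assembled; the helper name
// is illustrative and not part of this file, and the resulting message would be handed
// to orchestrateNotify() so internal frame numbers get translated back before reaching
// the framework.
static camera3_notify_msg_t makeBufferDropError(uint32_t frame_number,
        camera3_stream_t *stream)
{
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.frame_number = frame_number;
    // CAMERA3_MSG_ERROR_BUFFER tells the framework that only this stream's buffer
    // failed; the rest of the request (metadata, other streams) is still expected.
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
    notify_msg.message.error.error_stream = stream;
    return notify_msg;
}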
3918
3919/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003920 * FUNCTION : handleDepthDataLocked
3921 *
3922 * DESCRIPTION: Handles incoming depth data
3923 *
3924 * PARAMETERS : @depthData : Depth data
3925 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003926 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003927 *
3928 * RETURN :
3929 *
3930 *==========================================================================*/
3931void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003932 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003933 uint32_t currentFrameNumber;
3934 buffer_handle_t *depthBuffer;
3935
3936 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003937 return;
3938 }
3939
3940 camera3_stream_buffer_t resultBuffer =
3941 {.acquire_fence = -1,
3942 .release_fence = -1,
3943 .status = CAMERA3_BUFFER_STATUS_OK,
3944 .buffer = nullptr,
3945 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003946 do {
3947 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3948 if (nullptr == depthBuffer) {
3949 break;
3950 }
3951
Emilian Peev7650c122017-01-19 08:24:33 -08003952 resultBuffer.buffer = depthBuffer;
3953 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003954 if (valid) {
3955 int32_t rc = mDepthChannel->populateDepthData(depthData,
3956 frameNumber);
3957 if (NO_ERROR != rc) {
3958 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3959 } else {
3960 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3961 }
Emilian Peev7650c122017-01-19 08:24:33 -08003962 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003963 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003964 }
3965 } else if (currentFrameNumber > frameNumber) {
3966 break;
3967 } else {
3968 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3969 {{currentFrameNumber, mDepthChannel->getStream(),
3970 CAMERA3_MSG_ERROR_BUFFER}}};
3971 orchestrateNotify(&notify_msg);
3972
3973            LOGE("Depth buffer for frame number: %d is missing, "
3974                    "returning it with error status!", currentFrameNumber);
3975 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3976 }
3977 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003978 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003979 } while (currentFrameNumber < frameNumber);
3980}
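// handleDepthDataLocked() drains the depth queue strictly in frame order: everything
// older than the incoming frame is returned with an error, and only the exact match is
// populated with depth data. A simplified, self-contained sketch of that ordering rule;
// the map and helper name are illustrative stand-ins, not structures used by this HAL.
static size_t drainDepthFrames(std::map<uint32_t, buffer_handle_t *> &pending,
        uint32_t targetFrame, bool dataValid)
{
    size_t returnedOk = 0;
    auto it = pending.begin();
    // std::map iterates in ascending frame-number order, mirroring getOldestFrame().
    while (it != pending.end() && it->first <= targetFrame) {
        // Only the exact match carries valid depth data; every older entry is flushed
        // back in error state so buffer ordering toward the framework is preserved.
        if ((it->first == targetFrame) && dataValid) {
            returnedOk++;
        }
        it = pending.erase(it);
    }
    return returnedOk;
}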
3981
3982/*===========================================================================
3983 * FUNCTION : notifyErrorFoPendingDepthData
3984 *
3985 * DESCRIPTION: Returns error for any pending depth buffers
3986 *
3987 * PARAMETERS : depthCh - depth channel that needs to get flushed
3988 *
3989 * RETURN :
3990 *
3991 *==========================================================================*/
3992void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3993 QCamera3DepthChannel *depthCh) {
3994 uint32_t currentFrameNumber;
3995 buffer_handle_t *depthBuffer;
3996
3997 if (nullptr == depthCh) {
3998 return;
3999 }
4000
4001 camera3_notify_msg_t notify_msg =
4002 {.type = CAMERA3_MSG_ERROR,
4003 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4004 camera3_stream_buffer_t resultBuffer =
4005 {.acquire_fence = -1,
4006 .release_fence = -1,
4007 .buffer = nullptr,
4008 .stream = depthCh->getStream(),
4009 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004010
4011 while (nullptr !=
4012 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4013 depthCh->unmapBuffer(currentFrameNumber);
4014
4015 notify_msg.message.error.frame_number = currentFrameNumber;
4016 orchestrateNotify(&notify_msg);
4017
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004018 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004019 };
4020}
4021
4022/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004023 * FUNCTION : hdrPlusPerfLock
4024 *
4025 * DESCRIPTION: perf lock for HDR+ using custom intent
4026 *
4027 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4028 *
4029 * RETURN : None
4030 *
4031 *==========================================================================*/
4032void QCamera3HardwareInterface::hdrPlusPerfLock(
4033 mm_camera_super_buf_t *metadata_buf)
4034{
4035 if (NULL == metadata_buf) {
4036 LOGE("metadata_buf is NULL");
4037 return;
4038 }
4039 metadata_buffer_t *metadata =
4040 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4041 int32_t *p_frame_number_valid =
4042 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4043 uint32_t *p_frame_number =
4044 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4045
4046 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4047 LOGE("%s: Invalid metadata", __func__);
4048 return;
4049 }
4050
Wei Wang01385482017-08-03 10:49:34 -07004051 //acquire perf lock for 2 secs after the last HDR frame is captured
4052 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004053 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4054 if ((p_frame_number != NULL) &&
4055 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004056 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004057 }
4058 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004059}
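// The perf lock above is time-bounded: it is requested with a 2 s budget and, as seen in
// handleBufferWithLock() below, released early once the BLOB buffer comes back. A sketch
// of that acquire/release pairing; the function and the generic manager type are
// illustrative only, since this file only exposes the mPerfLockMgr member.
template <typename PerfLockManager>
static void timedSnapshotPerfLock(PerfLockManager &mgr)
{
    constexpr uint32_t kTimeoutMs = 2000;   // same budget as HDR_PLUS_PERF_TIME_OUT
    // Acquire with a timeout so a missed release cannot pin the clocks indefinitely.
    mgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, kTimeoutMs);
    // ... the snapshot request is submitted and processed here ...
    // Release as soon as the JPEG/BLOB buffer has been handed back to the framework.
    mgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
}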
4060
4061/*===========================================================================
4062 * FUNCTION : handleInputBufferWithLock
4063 *
4064 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4065 *
4066 * PARAMETERS : @frame_number: frame number of the input buffer
4067 *
4068 * RETURN :
4069 *
4070 *==========================================================================*/
4071void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4072{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004073 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 pendingRequestIterator i = mPendingRequestsList.begin();
4075 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4076 i++;
4077 }
4078 if (i != mPendingRequestsList.end() && i->input_buffer) {
4079 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004080 CameraMetadata settings;
4081 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4082 if(i->settings) {
4083 settings = i->settings;
4084 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4085 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004086 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004087 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004089 } else {
4090 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004091 }
4092
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004093 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4094 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4095 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004096
4097 camera3_capture_result result;
4098 memset(&result, 0, sizeof(camera3_capture_result));
4099 result.frame_number = frame_number;
4100 result.result = i->settings;
4101 result.input_buffer = i->input_buffer;
4102 result.partial_result = PARTIAL_RESULT_COUNT;
4103
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004104 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 LOGD("Input request metadata and input buffer frame_number = %u",
4106 i->frame_number);
4107 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004108
4109 // Dispatch result metadata that may be just unblocked by this reprocess result.
4110 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 } else {
4112 LOGE("Could not find input request for frame number %d", frame_number);
4113 }
4114}
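// The timestamp handling above is the standard CameraMetadata lookup-with-fallback
// pattern. A self-contained sketch (the helper name is illustrative):
static nsecs_t getReprocessShutterTimestamp(const CameraMetadata &settings)
{
    // Default to the current monotonic time when the reprocess settings do not
    // carry the original sensor timestamp.
    nsecs_t timestamp = systemTime(CLOCK_MONOTONIC);
    if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
        timestamp = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
    }
    return timestamp;
}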
4115
4116/*===========================================================================
4117 * FUNCTION : handleBufferWithLock
4118 *
4119 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4120 *
4121 * PARAMETERS : @buffer: image buffer for the callback
4122 * @frame_number: frame number of the image buffer
4123 *
4124 * RETURN :
4125 *
4126 *==========================================================================*/
4127void QCamera3HardwareInterface::handleBufferWithLock(
4128 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4129{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004130 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004131
4132 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4133 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4134 }
4135
Thierry Strudel3d639192016-09-09 11:52:26 -07004136 /* Nothing to be done during error state */
4137 if ((ERROR == mState) || (DEINIT == mState)) {
4138 return;
4139 }
4140 if (mFlushPerf) {
4141 handleBuffersDuringFlushLock(buffer);
4142 return;
4143 }
4144 //not in flush
4145 // If the frame number doesn't exist in the pending request list,
4146 // directly send the buffer to the frameworks, and update pending buffers map
4147 // Otherwise, book-keep the buffer.
4148 pendingRequestIterator i = mPendingRequestsList.begin();
4149 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4150 i++;
4151 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004152
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004153 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004154 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004155 // For a reprocessing request, try to send out result metadata.
4156 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004158 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004160 // Check if this frame was dropped.
4161 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4162 m != mPendingFrameDropList.end(); m++) {
4163 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4164 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4165 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4166 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4167 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4168 frame_number, streamID);
4169 m = mPendingFrameDropList.erase(m);
4170 break;
4171 }
4172 }
4173
4174 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4175 LOGH("result frame_number = %d, buffer = %p",
4176 frame_number, buffer->buffer);
4177
4178 mPendingBuffersMap.removeBuf(buffer->buffer);
4179 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4180
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004181 if (mPreviewStarted == false) {
4182 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4183 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004184 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4185
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004186 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4187 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4188 mPreviewStarted = true;
4189
4190 // Set power hint for preview
4191 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4192 }
4193 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004194}
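// The drop check above is a linear scan keyed on (stream ID, frame number), with the
// matched entry erased so later buffers on the same stream are not flagged again. The
// same lookup factored into a standalone helper for illustration (this helper does not
// exist in the HAL; it operates on the same PendingFrameDropInfo entries):
static bool consumeFrameDropEntry(List<PendingFrameDropInfo> &dropList,
        uint32_t frame_number, uint32_t streamID)
{
    for (List<PendingFrameDropInfo>::iterator m = dropList.begin();
            m != dropList.end(); m++) {
        if ((m->stream_ID == streamID) && (m->frame_number == frame_number)) {
            // Consume the entry on match; the caller marks the buffer
            // CAMERA3_BUFFER_STATUS_ERROR before dispatching it.
            dropList.erase(m);
            return true;
        }
    }
    return false;
}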
4195
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004196void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004197 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004198{
4199 // Find the pending request for this result metadata.
4200 auto requestIter = mPendingRequestsList.begin();
4201 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4202 requestIter++;
4203 }
4204
4205 if (requestIter == mPendingRequestsList.end()) {
4206 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4207 return;
4208 }
4209
4210 // Update the result metadata
4211 requestIter->resultMetadata = resultMetadata;
4212
4213 // Check what type of request this is.
4214 bool liveRequest = false;
4215 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004216 // HDR+ request doesn't have partial results.
4217 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 } else if (requestIter->input_buffer != nullptr) {
4219 // Reprocessing request result is the same as settings.
4220 requestIter->resultMetadata = requestIter->settings;
4221 // Reprocessing request doesn't have partial results.
4222 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4223 } else {
4224 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004225 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004226 mPendingLiveRequest--;
4227
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004228 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004229 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004230 // For a live request, send the metadata to HDR+ client.
4231 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4232 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4233 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4234 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004235 }
4236 }
4237
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004238    // Remove the lens shading map if it's not requested.
4239 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4240 CameraMetadata metadata;
4241 metadata.acquire(resultMetadata);
4242 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4243 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4244 &requestIter->requestedLensShadingMapMode, 1);
4245
4246 requestIter->resultMetadata = metadata.release();
4247 }
4248
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004249 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4250}
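// The lens-shading-map removal above relies on CameraMetadata's acquire/release
// ownership model: acquire() adopts the raw buffer without copying, and release()
// hands a (possibly reallocated) raw buffer back to the caller. A standalone sketch
// of that edit pattern:
static camera_metadata_t *stripLensShadingMap(camera_metadata_t *result,
        uint8_t requestedMapMode)
{
    CameraMetadata metadata;
    metadata.acquire(result);   // takes ownership; 'result' must not be used afterwards
    metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
    metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &requestedMapMode, 1);
    return metadata.release();  // ownership goes back to the caller
}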
4251
4252void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4253 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004254 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4255 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004256 bool readyToSend = true;
4257
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004258 // Iterate through the pending requests to send out result metadata that are ready. Also if
4259 // this result metadata belongs to a live request, notify errors for previous live requests
4260 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004261 auto iter = mPendingRequestsList.begin();
4262 while (iter != mPendingRequestsList.end()) {
4263 // Check if current pending request is ready. If it's not ready, the following pending
4264 // requests are also not ready.
4265 if (readyToSend && iter->resultMetadata == nullptr) {
4266 readyToSend = false;
4267 }
4268
4269 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4270
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004271 camera3_capture_result_t result = {};
4272 result.frame_number = iter->frame_number;
4273 result.result = iter->resultMetadata;
4274 result.partial_result = iter->partial_result_cnt;
4275
4276 // If this pending buffer has result metadata, we may be able to send out shutter callback
4277 // and result metadata.
4278 if (iter->resultMetadata != nullptr) {
4279 if (!readyToSend) {
4280 // If any of the previous pending request is not ready, this pending request is
4281 // also not ready to send in order to keep shutter callbacks and result metadata
4282 // in order.
4283 iter++;
4284 continue;
4285 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004286 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004287 // If the result metadata belongs to a live request, notify errors for previous pending
4288 // live requests.
4289 mPendingLiveRequest--;
4290
4291 CameraMetadata dummyMetadata;
4292 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4293 result.result = dummyMetadata.release();
4294
4295 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004296
4297            // partial_result should be PARTIAL_RESULT_COUNT in case of
4298            // ERROR_RESULT.
4299 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4300 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004301 } else {
4302 iter++;
4303 continue;
4304 }
4305
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004306 result.output_buffers = nullptr;
4307 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004308 orchestrateResult(&result);
4309
4310 // For reprocessing, result metadata is the same as settings so do not free it here to
4311 // avoid double free.
4312 if (result.result != iter->settings) {
4313 free_camera_metadata((camera_metadata_t *)result.result);
4314 }
4315 iter->resultMetadata = nullptr;
4316 iter = erasePendingRequest(iter);
4317 }
4318
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 for (auto &iter : mPendingRequestsList) {
4321 // Increment pipeline depth for the following pending requests.
4322 if (iter.frame_number > frameNumber) {
4323 iter.pipeline_depth++;
4324 }
4325 }
4326 }
4327
4328 unblockRequestIfNecessary();
4329}
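// The dispatch loop above enforces a single invariant: a result may be sent only when
// every earlier pending request already has its metadata, so shutters and results reach
// the framework in frame-number order. A stripped-down sketch of that rule; the map and
// the commented-out callback stand in for mPendingRequestsList and orchestrateResult().
static size_t dispatchReadyResultsInOrder(
        std::map<uint32_t, camera_metadata_t *> &pendingResults)
{
    size_t dispatched = 0;
    auto it = pendingResults.begin();
    // std::map iterates in ascending frame-number order; stop at the first entry whose
    // metadata has not arrived yet, even if later entries are already complete.
    while (it != pendingResults.end() && it->second != nullptr) {
        // orchestrateResult() would be invoked here with it->second.
        it = pendingResults.erase(it);
        dispatched++;
    }
    return dispatched;
}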
4330
Thierry Strudel3d639192016-09-09 11:52:26 -07004331/*===========================================================================
4332 * FUNCTION : unblockRequestIfNecessary
4333 *
4334 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4335 * that mMutex is held when this function is called.
4336 *
4337 * PARAMETERS :
4338 *
4339 * RETURN :
4340 *
4341 *==========================================================================*/
4342void QCamera3HardwareInterface::unblockRequestIfNecessary()
4343{
4344 // Unblock process_capture_request
4345 pthread_cond_signal(&mRequestCond);
4346}
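// unblockRequestIfNecessary() is the signalling half of a standard condition-variable
// handshake on mMutex/mRequestCond. The waiting half on the request path follows the
// usual shape sketched below; the predicate shown is illustrative, not the exact
// in-flight check used by this HAL.
static void waitForRequestSlot(pthread_mutex_t *mutex, pthread_cond_t *cond,
        const uint32_t *inFlight, uint32_t maxInFlight)
{
    // Caller already holds *mutex. Re-check the predicate after every wakeup because
    // pthread_cond_wait() may wake spuriously.
    while (*inFlight >= maxInFlight) {
        pthread_cond_wait(cond, mutex);
    }
}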
4347
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004348/*===========================================================================
4349 * FUNCTION : isHdrSnapshotRequest
4350 *
4351 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4352 *
4353 * PARAMETERS : camera3 request structure
4354 *
4355 * RETURN : boolean decision variable
4356 *
4357 *==========================================================================*/
4358bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4359{
4360 if (request == NULL) {
4361 LOGE("Invalid request handle");
4362 assert(0);
4363 return false;
4364 }
4365
4366 if (!mForceHdrSnapshot) {
4367 CameraMetadata frame_settings;
4368 frame_settings = request->settings;
4369
4370 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4371 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4372 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4373 return false;
4374 }
4375 } else {
4376 return false;
4377 }
4378
4379 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4380 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4381 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4382 return false;
4383 }
4384 } else {
4385 return false;
4386 }
4387 }
4388
4389 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4390 if (request->output_buffers[i].stream->format
4391 == HAL_PIXEL_FORMAT_BLOB) {
4392 return true;
4393 }
4394 }
4395
4396 return false;
4397}
4398/*===========================================================================
4399 * FUNCTION : orchestrateRequest
4400 *
4401 * DESCRIPTION: Orchestrates a capture request from camera service
4402 *
4403 * PARAMETERS :
4404 * @request : request from framework to process
4405 *
4406 * RETURN : Error status codes
4407 *
4408 *==========================================================================*/
4409int32_t QCamera3HardwareInterface::orchestrateRequest(
4410 camera3_capture_request_t *request)
4411{
4412
4413 uint32_t originalFrameNumber = request->frame_number;
4414 uint32_t originalOutputCount = request->num_output_buffers;
4415 const camera_metadata_t *original_settings = request->settings;
4416 List<InternalRequest> internallyRequestedStreams;
4417 List<InternalRequest> emptyInternalList;
4418
4419 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4420 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4421 uint32_t internalFrameNumber;
4422 CameraMetadata modified_meta;
4423
4424
4425 /* Add Blob channel to list of internally requested streams */
4426 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4427 if (request->output_buffers[i].stream->format
4428 == HAL_PIXEL_FORMAT_BLOB) {
4429 InternalRequest streamRequested;
4430 streamRequested.meteringOnly = 1;
4431 streamRequested.need_metadata = 0;
4432 streamRequested.stream = request->output_buffers[i].stream;
4433 internallyRequestedStreams.push_back(streamRequested);
4434 }
4435 }
4436 request->num_output_buffers = 0;
4437 auto itr = internallyRequestedStreams.begin();
4438
4439 /* Modify setting to set compensation */
4440 modified_meta = request->settings;
4441 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4442 uint8_t aeLock = 1;
4443 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4444 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4445 camera_metadata_t *modified_settings = modified_meta.release();
4446 request->settings = modified_settings;
4447
4448 /* Capture Settling & -2x frame */
4449 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4450 request->frame_number = internalFrameNumber;
4451 processCaptureRequest(request, internallyRequestedStreams);
4452
4453 request->num_output_buffers = originalOutputCount;
4454 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4455 request->frame_number = internalFrameNumber;
4456 processCaptureRequest(request, emptyInternalList);
4457 request->num_output_buffers = 0;
4458
4459 modified_meta = modified_settings;
4460 expCompensation = 0;
4461 aeLock = 1;
4462 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4463 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4464 modified_settings = modified_meta.release();
4465 request->settings = modified_settings;
4466
4467 /* Capture Settling & 0X frame */
4468
4469 itr = internallyRequestedStreams.begin();
4470 if (itr == internallyRequestedStreams.end()) {
4471 LOGE("Error Internally Requested Stream list is empty");
4472 assert(0);
4473 } else {
4474 itr->need_metadata = 0;
4475 itr->meteringOnly = 1;
4476 }
4477
4478 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4479 request->frame_number = internalFrameNumber;
4480 processCaptureRequest(request, internallyRequestedStreams);
4481
4482 itr = internallyRequestedStreams.begin();
4483 if (itr == internallyRequestedStreams.end()) {
4484 ALOGE("Error Internally Requested Stream list is empty");
4485 assert(0);
4486 } else {
4487 itr->need_metadata = 1;
4488 itr->meteringOnly = 0;
4489 }
4490
4491 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4492 request->frame_number = internalFrameNumber;
4493 processCaptureRequest(request, internallyRequestedStreams);
4494
4495 /* Capture 2X frame*/
4496 modified_meta = modified_settings;
4497 expCompensation = GB_HDR_2X_STEP_EV;
4498 aeLock = 1;
4499 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4500 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4501 modified_settings = modified_meta.release();
4502 request->settings = modified_settings;
4503
4504 itr = internallyRequestedStreams.begin();
4505 if (itr == internallyRequestedStreams.end()) {
4506 ALOGE("Error Internally Requested Stream list is empty");
4507 assert(0);
4508 } else {
4509 itr->need_metadata = 0;
4510 itr->meteringOnly = 1;
4511 }
4512 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4513 request->frame_number = internalFrameNumber;
4514 processCaptureRequest(request, internallyRequestedStreams);
4515
4516 itr = internallyRequestedStreams.begin();
4517 if (itr == internallyRequestedStreams.end()) {
4518 ALOGE("Error Internally Requested Stream list is empty");
4519 assert(0);
4520 } else {
4521 itr->need_metadata = 1;
4522 itr->meteringOnly = 0;
4523 }
4524
4525 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4526 request->frame_number = internalFrameNumber;
4527 processCaptureRequest(request, internallyRequestedStreams);
4528
4529
4530 /* Capture 2X on original streaming config*/
4531 internallyRequestedStreams.clear();
4532
4533 /* Restore original settings pointer */
4534 request->settings = original_settings;
4535 } else {
4536 uint32_t internalFrameNumber;
4537 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4538 request->frame_number = internalFrameNumber;
4539 return processCaptureRequest(request, internallyRequestedStreams);
4540 }
4541
4542 return NO_ERROR;
4543}
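// The HDR orchestration above issues an exposure bracket by resubmitting the same
// request with modified settings: EV = GB_HDR_HALF_STEP_EV, then EV = 0, then
// EV = GB_HDR_2X_STEP_EV, with AE locked throughout and each exposure preceded by a
// metering-only settling request. Each per-step tweak is just the CameraMetadata
// update pattern below (a sketch; the helper name is illustrative):
static camera_metadata_t *makeBracketSettings(const camera_metadata_t *baseSettings,
        int32_t expCompensation)
{
    CameraMetadata modified;
    modified = baseSettings;                // deep-copies the framework settings
    uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_ON;
    modified.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
    modified.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
    return modified.release();              // caller owns the returned buffer
}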
4544
4545/*===========================================================================
4546 * FUNCTION : orchestrateResult
4547 *
4548 * DESCRIPTION: Orchestrates a capture result to camera service
4549 *
4550 * PARAMETERS :
4551 *   @result : capture result to send to camera service
4552 *
4553 * RETURN :
4554 *
4555 *==========================================================================*/
4556void QCamera3HardwareInterface::orchestrateResult(
4557 camera3_capture_result_t *result)
4558{
4559 uint32_t frameworkFrameNumber;
4560 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4561 frameworkFrameNumber);
4562 if (rc != NO_ERROR) {
4563 LOGE("Cannot find translated frameworkFrameNumber");
4564 assert(0);
4565 } else {
4566 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004567 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004568 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004569 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004570 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4571 camera_metadata_entry_t entry;
4572 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4573 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004574 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004575 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4576 if (ret != OK)
4577 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004578 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004579 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580 result->frame_number = frameworkFrameNumber;
4581 mCallbackOps->process_capture_result(mCallbackOps, result);
4582 }
4583 }
4584}
4585
4586/*===========================================================================
4587 * FUNCTION : orchestrateNotify
4588 *
4589 * DESCRIPTION: Orchestrates a notify to camera service
4590 *
4591 * PARAMETERS :
4592 *   @notify_msg : notify message to send to camera service
4593 *
4594 * RETURN :
4595 *
4596 *==========================================================================*/
4597void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4598{
4599 uint32_t frameworkFrameNumber;
4600 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004601 int32_t rc = NO_ERROR;
4602
4603 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004605
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004606 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004607 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4608 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4609 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004610 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004611 LOGE("Cannot find translated frameworkFrameNumber");
4612 assert(0);
4613 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004614 }
4615 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004616
4617 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4618 LOGD("Internal Request drop the notifyCb");
4619 } else {
4620 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4621 mCallbackOps->notify(mCallbackOps, notify_msg);
4622 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004623}
4624
4625/*===========================================================================
4626 * FUNCTION : FrameNumberRegistry
4627 *
4628 * DESCRIPTION: Constructor
4629 *
4630 * PARAMETERS :
4631 *
4632 * RETURN :
4633 *
4634 *==========================================================================*/
4635FrameNumberRegistry::FrameNumberRegistry()
4636{
4637 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4638}
4639
4640/*===========================================================================
4641 * FUNCTION : ~FrameNumberRegistry
4642 *
4643 * DESCRIPTION: Destructor
4644 *
4645 * PARAMETERS :
4646 *
4647 * RETURN :
4648 *
4649 *==========================================================================*/
4650FrameNumberRegistry::~FrameNumberRegistry()
4651{
4652}
4653
4654/*===========================================================================
4655 * FUNCTION : PurgeOldEntriesLocked
4656 *
4657 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4658 *
4659 * PARAMETERS :
4660 *
4661 * RETURN : NONE
4662 *
4663 *==========================================================================*/
4664void FrameNumberRegistry::purgeOldEntriesLocked()
4665{
4666 while (_register.begin() != _register.end()) {
4667 auto itr = _register.begin();
4668 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4669 _register.erase(itr);
4670 } else {
4671 return;
4672 }
4673 }
4674}
4675
4676/*===========================================================================
4677 * FUNCTION : allocStoreInternalFrameNumber
4678 *
4679 * DESCRIPTION: Method to record a framework request and associate a new
4680 *              internal frame number with it
4681 *
4682 * PARAMETERS :
4683 *   @frameworkFrameNumber: Identifier given by the framework
4684 *   @internalFrameNumber : Output parameter that receives the newly generated
4685 *                          internal frame number
4686 *
4687 * RETURN : Error code
4688 *
4689 *==========================================================================*/
4690int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4691 uint32_t &internalFrameNumber)
4692{
4693 Mutex::Autolock lock(mRegistryLock);
4694 internalFrameNumber = _nextFreeInternalNumber++;
4695 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4696 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4697 purgeOldEntriesLocked();
4698 return NO_ERROR;
4699}
4700
4701/*===========================================================================
4702 * FUNCTION : generateStoreInternalFrameNumber
4703 *
4704 * DESCRIPTION: Method to associate a new internal request number, independent
4705 *              of any association with framework requests
4706 *
4707 * PARAMETERS :
4708 *   @internalFrameNumber: Output parameter that receives the newly generated internal frame number
4709 *
4710 *
4711 * RETURN : Error code
4712 *
4713 *==========================================================================*/
4714int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4715{
4716 Mutex::Autolock lock(mRegistryLock);
4717 internalFrameNumber = _nextFreeInternalNumber++;
4718 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4719 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4720 purgeOldEntriesLocked();
4721 return NO_ERROR;
4722}
4723
4724/*===========================================================================
4725 * FUNCTION : getFrameworkFrameNumber
4726 *
4727 * DESCRIPTION: Method to query the framework framenumber given an internal #
4728 *
4729 * PARAMETERS :
4730 * @internalFrame#: Internal reference
4731 * @frameworkframenumber: Output parameter holding framework frame entry
4732 *
4733 * RETURN : Error code
4734 *
4735 *==========================================================================*/
4736int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4737 uint32_t &frameworkFrameNumber)
4738{
4739 Mutex::Autolock lock(mRegistryLock);
4740 auto itr = _register.find(internalFrameNumber);
4741 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004742 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004743 return -ENOENT;
4744 }
4745
4746 frameworkFrameNumber = itr->second;
4747 purgeOldEntriesLocked();
4748 return NO_ERROR;
4749}
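// Typical round trip through the registry defined above: the request path stores the
// framework frame number under a fresh internal number, and the result/notify path
// translates it back before anything is reported upward. A usage sketch with an
// illustrative framework number:
static void frameNumberRegistryExample(FrameNumberRegistry &registry)
{
    uint32_t internalFrameNumber = 0;
    uint32_t frameworkFrameNumber = 100;    // illustrative value

    // Request path: remember the framework number against a new internal one.
    registry.allocStoreInternalFrameNumber(frameworkFrameNumber, internalFrameNumber);

    // Result path: translate back. Entries created by generateStoreInternalFrameNumber()
    // map to EMPTY_FRAMEWORK_FRAME_NUMBER and their results are dropped by the
    // orchestrators above.
    uint32_t reported = 0;
    if (registry.getFrameworkFrameNumber(internalFrameNumber, reported) == NO_ERROR) {
        // reported == 100 here; results and notifies are forwarded with this number.
    }
}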
Thierry Strudel3d639192016-09-09 11:52:26 -07004750
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004751status_t QCamera3HardwareInterface::fillPbStreamConfig(
4752 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4753 QCamera3Channel *channel, uint32_t streamIndex) {
4754 if (config == nullptr) {
4755 LOGE("%s: config is null", __FUNCTION__);
4756 return BAD_VALUE;
4757 }
4758
4759 if (channel == nullptr) {
4760 LOGE("%s: channel is null", __FUNCTION__);
4761 return BAD_VALUE;
4762 }
4763
4764 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4765 if (stream == nullptr) {
4766 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4767 return NAME_NOT_FOUND;
4768 }
4769
4770 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4771 if (streamInfo == nullptr) {
4772 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4773 return NAME_NOT_FOUND;
4774 }
4775
4776 config->id = pbStreamId;
4777 config->image.width = streamInfo->dim.width;
4778 config->image.height = streamInfo->dim.height;
4779 config->image.padding = 0;
4780 config->image.format = pbStreamFormat;
4781
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004782 uint32_t totalPlaneSize = 0;
4783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784 // Fill plane information.
4785 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4786 pbcamera::PlaneConfiguration plane;
4787 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4788 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4789 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004790
4791 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004792 }
4793
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004794 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004795 return OK;
4796}
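// The padding computed above is simply whatever the reported frame length adds beyond
// the sum of stride * scanline over all planes. A small worked sketch with made-up
// NV21-style numbers (purely illustrative, not values from any real sensor mode):
static uint32_t computePbPadding(uint32_t frameLen, const uint32_t strides[],
        const uint32_t scanlines[], uint32_t numPlanes)
{
    uint32_t totalPlaneSize = 0;
    for (uint32_t i = 0; i < numPlanes; i++) {
        totalPlaneSize += strides[i] * scanlines[i];
    }
    return frameLen - totalPlaneSize;
}
// e.g. strides {1920, 1920}, scanlines {1088, 544} and frameLen 3137536 give
// 3137536 - (2088960 + 1044480) = 4096 bytes of padding.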
4797
Thierry Strudel3d639192016-09-09 11:52:26 -07004798/*===========================================================================
4799 * FUNCTION : processCaptureRequest
4800 *
4801 * DESCRIPTION: process a capture request from camera service
4802 *
4803 * PARAMETERS :
4804 * @request : request from framework to process
4805 *
4806 * RETURN :
4807 *
4808 *==========================================================================*/
4809int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004810 camera3_capture_request_t *request,
4811 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004812{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004813 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 int rc = NO_ERROR;
4815 int32_t request_id;
4816 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 bool isVidBufRequested = false;
4818 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004819 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004820
4821 pthread_mutex_lock(&mMutex);
4822
4823 // Validate current state
4824 switch (mState) {
4825 case CONFIGURED:
4826 case STARTED:
4827 /* valid state */
4828 break;
4829
4830 case ERROR:
4831 pthread_mutex_unlock(&mMutex);
4832 handleCameraDeviceError();
4833 return -ENODEV;
4834
4835 default:
4836 LOGE("Invalid state %d", mState);
4837 pthread_mutex_unlock(&mMutex);
4838 return -ENODEV;
4839 }
4840
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004841 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 if (rc != NO_ERROR) {
4843 LOGE("incoming request is not valid");
4844 pthread_mutex_unlock(&mMutex);
4845 return rc;
4846 }
4847
4848 meta = request->settings;
4849
4850 // For first capture request, send capture intent, and
4851 // stream on all streams
4852 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004853 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 // send an unconfigure to the backend so that the isp
4855 // resources are deallocated
4856 if (!mFirstConfiguration) {
4857 cam_stream_size_info_t stream_config_info;
4858 int32_t hal_version = CAM_HAL_V3;
4859 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4860 stream_config_info.buffer_info.min_buffers =
4861 MIN_INFLIGHT_REQUESTS;
4862 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004863 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004864 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 clear_metadata_buffer(mParameters);
4866 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4867 CAM_INTF_PARM_HAL_VERSION, hal_version);
4868 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4869 CAM_INTF_META_STREAM_INFO, stream_config_info);
4870 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4871 mParameters);
4872 if (rc < 0) {
4873 LOGE("set_parms for unconfigure failed");
4874 pthread_mutex_unlock(&mMutex);
4875 return rc;
4876 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004877
Thierry Strudel3d639192016-09-09 11:52:26 -07004878 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004879 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004880 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 property_get("persist.camera.is_type", is_type_value, "4");
4884 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4885 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4886 property_get("persist.camera.is_type_preview", is_type_value, "4");
4887 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4888 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004889
4890 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4891 int32_t hal_version = CAM_HAL_V3;
4892 uint8_t captureIntent =
4893 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4894 mCaptureIntent = captureIntent;
4895 clear_metadata_buffer(mParameters);
4896 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4897 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4898 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004899 if (mFirstConfiguration) {
4900 // configure instant AEC
4901 // Instant AEC is a session based parameter and it is needed only
4902 // once per complete session after open camera.
4903 // i.e. This is set only once for the first capture request, after open camera.
4904 setInstantAEC(meta);
4905 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906 uint8_t fwkVideoStabMode=0;
4907 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4908 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4909 }
4910
Xue Tuecac74e2017-04-17 13:58:15 -07004911        // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4912 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004913 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 int32_t vsMode;
4915 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4917 rc = BAD_VALUE;
4918 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004919 LOGD("setEis %d", setEis);
4920 bool eis3Supported = false;
4921 size_t count = IS_TYPE_MAX;
4922 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4923 for (size_t i = 0; i < count; i++) {
4924 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4925 eis3Supported = true;
4926 break;
4927 }
4928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004929
4930 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4933 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004934 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4935 is_type = isTypePreview;
4936 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4937 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4938 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 } else {
4941 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004943 } else {
4944 is_type = IS_TYPE_NONE;
4945 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004947 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004948 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4949 }
4950 }
4951
4952 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4953 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4954
Thierry Strudel54dc9782017-02-15 12:12:10 -08004955 //Disable tintless only if the property is set to 0
4956 memset(prop, 0, sizeof(prop));
4957 property_get("persist.camera.tintless.enable", prop, "1");
4958 int32_t tintless_value = atoi(prop);
4959
Thierry Strudel3d639192016-09-09 11:52:26 -07004960 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4961 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004962
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 //Disable CDS for HFR mode or if DIS/EIS is on.
4964 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4965 //after every configure_stream
4966 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4967 (m_bIsVideo)) {
4968 int32_t cds = CAM_CDS_MODE_OFF;
4969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4970 CAM_INTF_PARM_CDS_MODE, cds))
4971 LOGE("Failed to disable CDS for HFR mode");
4972
4973 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004974
4975 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4976 uint8_t* use_av_timer = NULL;
4977
4978 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004979 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 use_av_timer = &m_debug_avtimer;
4981 }
4982 else{
4983 use_av_timer =
4984 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004985 if (use_av_timer) {
4986 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4987 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004988 }
4989
4990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4991 rc = BAD_VALUE;
4992 }
4993 }
4994
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 setMobicat();
4996
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004997 uint8_t nrMode = 0;
4998 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4999 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5000 }
5001
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 /* Set fps and hfr mode while sending meta stream info so that sensor
5003 * can configure appropriate streaming mode */
5004 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5006 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5008 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009 if (rc == NO_ERROR) {
5010 int32_t max_fps =
5011 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005012 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005013 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5014 }
5015 /* For HFR, more buffers are dequeued upfront to improve the performance */
5016 if (mBatchSize) {
5017 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5018 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5019 }
5020 }
5021 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005022 LOGE("setHalFpsRange failed");
5023 }
5024 }
5025 if (meta.exists(ANDROID_CONTROL_MODE)) {
5026 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5027 rc = extractSceneMode(meta, metaMode, mParameters);
5028 if (rc != NO_ERROR) {
5029 LOGE("extractSceneMode failed");
5030 }
5031 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005032 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005033
Thierry Strudel04e026f2016-10-10 11:27:36 -07005034 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5035 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5036 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5037 rc = setVideoHdrMode(mParameters, vhdr);
5038 if (rc != NO_ERROR) {
5039 LOGE("setVideoHDR is failed");
5040 }
5041 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005042
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005043 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005044 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005045 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005046 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5048 sensorModeFullFov)) {
5049 rc = BAD_VALUE;
5050 }
5051 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005052 //TODO: validate the arguments, HSV scenemode should have only the
5053 //advertised fps ranges
5054
5055 /*set the capture intent, hal version, tintless, stream info,
5056         *and DIS enable parameters to the backend*/
5057 LOGD("set_parms META_STREAM_INFO " );
5058 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005059 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5060 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005061 mStreamConfigInfo.type[i],
5062 mStreamConfigInfo.stream_sizes[i].width,
5063 mStreamConfigInfo.stream_sizes[i].height,
5064 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 mStreamConfigInfo.format[i],
5066 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005067 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005068
Thierry Strudel3d639192016-09-09 11:52:26 -07005069 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5070 mParameters);
5071 if (rc < 0) {
5072 LOGE("set_parms failed for hal version, stream info");
5073 }
5074
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005075 cam_sensor_mode_info_t sensorModeInfo = {};
5076 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 if (rc != NO_ERROR) {
5078 LOGE("Failed to get sensor output size");
5079 pthread_mutex_unlock(&mMutex);
5080 goto error_exit;
5081 }
5082
5083 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5084 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005085 sensorModeInfo.active_array_size.width,
5086 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005087
5088 /* Set batchmode before initializing channel. Since registerBuffer
5089 * internally initializes some of the channels, better set batchmode
5090 * even before first register buffer */
5091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5092 it != mStreamInfo.end(); it++) {
5093 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5094 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5095 && mBatchSize) {
5096 rc = channel->setBatchSize(mBatchSize);
5097 //Disable per frame map unmap for HFR/batchmode case
5098 rc |= channel->setPerFrameMapUnmap(false);
5099 if (NO_ERROR != rc) {
5100 LOGE("Channel init failed %d", rc);
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105 }
5106
5107 //First initialize all streams
5108 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5109 it != mStreamInfo.end(); it++) {
5110 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005111
5112 /* Initial value of NR mode is needed before stream on */
5113 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5115 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 setEis) {
5117 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5118 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5119 is_type = mStreamConfigInfo.is_type[i];
5120 break;
5121 }
5122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005124 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005125 rc = channel->initialize(IS_TYPE_NONE);
5126 }
5127 if (NO_ERROR != rc) {
5128 LOGE("Channel initialization failed %d", rc);
5129 pthread_mutex_unlock(&mMutex);
5130 goto error_exit;
5131 }
5132 }
5133
5134 if (mRawDumpChannel) {
5135 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5136 if (rc != NO_ERROR) {
5137 LOGE("Error: Raw Dump Channel init failed");
5138 pthread_mutex_unlock(&mMutex);
5139 goto error_exit;
5140 }
5141 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005142 if (mHdrPlusRawSrcChannel) {
5143 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5144 if (rc != NO_ERROR) {
5145 LOGE("Error: HDR+ RAW Source Channel init failed");
5146 pthread_mutex_unlock(&mMutex);
5147 goto error_exit;
5148 }
5149 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 if (mSupportChannel) {
5151 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5152 if (rc < 0) {
5153 LOGE("Support channel initialization failed");
5154 pthread_mutex_unlock(&mMutex);
5155 goto error_exit;
5156 }
5157 }
5158 if (mAnalysisChannel) {
5159 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5160 if (rc < 0) {
5161 LOGE("Analysis channel initialization failed");
5162 pthread_mutex_unlock(&mMutex);
5163 goto error_exit;
5164 }
5165 }
5166 if (mDummyBatchChannel) {
5167 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5168 if (rc < 0) {
5169 LOGE("mDummyBatchChannel setBatchSize failed");
5170 pthread_mutex_unlock(&mMutex);
5171 goto error_exit;
5172 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005173 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (rc < 0) {
5175 LOGE("mDummyBatchChannel initialization failed");
5176 pthread_mutex_unlock(&mMutex);
5177 goto error_exit;
5178 }
5179 }
5180
5181 // Set bundle info
5182 rc = setBundleInfo();
5183 if (rc < 0) {
5184 LOGE("setBundleInfo failed %d", rc);
5185 pthread_mutex_unlock(&mMutex);
5186 goto error_exit;
5187 }
5188
5189 //update settings from app here
5190 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5191 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5192 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5193 }
5194 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5195 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5196 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5197 }
5198 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5199 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5200 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5201
5202 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5203 (mLinkedCameraId != mCameraId) ) {
5204 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5205 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005206 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 goto error_exit;
5208 }
5209 }
5210
5211 // add bundle related cameras
5212 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5213 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005214 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5215 &m_pDualCamCmdPtr->bundle_info;
5216 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 if (mIsDeviceLinked)
5218 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5219 else
5220 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5221
5222 pthread_mutex_lock(&gCamLock);
5223
5224 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5225 LOGE("Dualcam: Invalid Session Id ");
5226 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005227 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 goto error_exit;
5229 }
5230
5231 if (mIsMainCamera == 1) {
5232 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5233 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005234 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005235 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236 // related session id should be session id of linked session
5237 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5238 } else {
5239 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5240 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005241 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005242 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5244 }
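        // Note: both roles point related_sensor_session_id at the peer camera's
        // session, so the backend can pair the bayer (main) and mono (aux)
        // sessions for 3A follow mode and hardware sync configured below.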
Thierry Strudel2896d122017-02-23 19:18:03 -08005245 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005246 pthread_mutex_unlock(&gCamLock);
5247
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005248 rc = mCameraHandle->ops->set_dual_cam_cmd(
5249 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 if (rc < 0) {
5251 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005252 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 goto error_exit;
5254 }
5255 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 goto no_error;
5257error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005258 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 return rc;
5260no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 mWokenUpByDaemon = false;
5262 mPendingLiveRequest = 0;
5263 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005264 }
5265
5266 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005267 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268
5269 if (mFlushPerf) {
5270 //we cannot accept any requests during flush
5271 LOGE("process_capture_request cannot proceed during flush");
5272 pthread_mutex_unlock(&mMutex);
5273 return NO_ERROR; //should return an error
5274 }
5275
5276 if (meta.exists(ANDROID_REQUEST_ID)) {
5277 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5278 mCurrentRequestId = request_id;
5279 LOGD("Received request with id: %d", request_id);
5280 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5281 LOGE("Unable to find request id field, \
5282 & no previous id available");
5283 pthread_mutex_unlock(&mMutex);
5284 return NAME_NOT_FOUND;
5285 } else {
5286 LOGD("Re-using old request id");
5287 request_id = mCurrentRequestId;
5288 }
5289
5290 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5291 request->num_output_buffers,
5292 request->input_buffer,
5293 frameNumber);
5294 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005295 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005296 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005297 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 uint32_t snapshotStreamId = 0;
5299 for (size_t i = 0; i < request->num_output_buffers; i++) {
5300 const camera3_stream_buffer_t& output = request->output_buffers[i];
5301 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5302
Emilian Peev7650c122017-01-19 08:24:33 -08005303 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5304 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005305 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 blob_request = 1;
5307 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5308 }
5309
5310 if (output.acquire_fence != -1) {
5311 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5312 close(output.acquire_fence);
5313 if (rc != OK) {
5314 LOGE("sync wait failed %d", rc);
5315 pthread_mutex_unlock(&mMutex);
5316 return rc;
5317 }
5318 }
5319
Emilian Peev0f3c3162017-03-15 12:57:46 +00005320 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5321 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005322 depthRequestPresent = true;
5323 continue;
5324 }
5325
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005326 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005328
5329 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5330 isVidBufRequested = true;
5331 }
5332 }
5333
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005334 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5335 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5336 itr++) {
5337 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5338 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5339 channel->getStreamID(channel->getStreamTypeMask());
5340
5341 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5342 isVidBufRequested = true;
5343 }
5344 }
5345
Thierry Strudel3d639192016-09-09 11:52:26 -07005346 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005347 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005348 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 }
5350 if (blob_request && mRawDumpChannel) {
5351 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005352 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005353 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005354 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005355 }
5356
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005357 {
5358 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5359 // Request a RAW buffer if
5360 // 1. mHdrPlusRawSrcChannel is valid.
5361 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5362 // 3. There is no pending HDR+ request.
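        // (Illustration only: if kHdrPlusRawPeriod were 4, a RAW buffer would be
        // requested at most on every 4th frame number, and never while an HDR+
        // request is still pending.)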
5363 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5364 mHdrPlusPendingRequests.size() == 0) {
5365 streamsArray.stream_request[streamsArray.num_streams].streamID =
5366 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5367 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5368 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005369 }
5370
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005371 //extract capture intent
5372 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5373 mCaptureIntent =
5374 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5375 }
5376
5377 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5378 mCacMode =
5379 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5380 }
5381
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005382 uint8_t requestedLensShadingMapMode;
5383 // Get the shading map mode.
5384 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5385 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5386 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5387 } else {
5388 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5389 }
5390
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005391 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005392 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005393
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005394 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005395 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005396 // If this request has a still capture intent, try to submit an HDR+ request.
5397 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5398 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5399 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5400 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005401 }
5402
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005403 if (hdrPlusRequest) {
5404 // For a HDR+ request, just set the frame parameters.
5405 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5406 if (rc < 0) {
5407 LOGE("fail to set frame parameters");
5408 pthread_mutex_unlock(&mMutex);
5409 return rc;
5410 }
5411 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 /* Parse the settings:
5413 * - For every request in NORMAL MODE
5414 * - For every request in HFR mode during preview only case
5415 * - For first request of every batch in HFR mode during video
5416 * recording. In batchmode the same settings except frame number is
5417 * repeated in each request of the batch.
5418 */
5419 if (!mBatchSize ||
5420 (mBatchSize && !isVidBufRequested) ||
5421 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005422 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (rc < 0) {
5424 LOGE("fail to set frame parameters");
5425 pthread_mutex_unlock(&mMutex);
5426 return rc;
5427 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005428
5429 {
5430 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5431 // will be reported in result metadata.
5432 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5433 if (mHdrPlusModeEnabled) {
5434 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5435 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5436 }
5437 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005438 }
5439 /* For batchMode HFR, setFrameParameters is not called for every
5440 * request; only the frame number of the latest request is parsed.
5441 * Keep track of the first and last frame numbers in a batch so that
5442 * metadata for all frame numbers of the batch can be duplicated in
5443 * handleBatchMetadata */
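        // Example (hypothetical mBatchSize of 4): frames N..N+3 form one batch; only
        // the latest request's frame number is parsed here, while mFirstFrameNumberInBatch
        // records N so that handleBatchMetadata can duplicate the metadata for N..N+3.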
5444 if (mBatchSize) {
5445 if (!mToBeQueuedVidBufs) {
5446 //start of the batch
5447 mFirstFrameNumberInBatch = request->frame_number;
5448 }
5449 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5450 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5451 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005452 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 return BAD_VALUE;
5454 }
5455 }
5456 if (mNeedSensorRestart) {
5457 /* Unlock the mutex as restartSensor waits on the channels to be
5458 * stopped, which in turn calls stream callback functions -
5459 * handleBufferWithLock and handleMetadataWithLock */
5460 pthread_mutex_unlock(&mMutex);
5461 rc = dynamicUpdateMetaStreamInfo();
5462 if (rc != NO_ERROR) {
5463 LOGE("Restarting the sensor failed");
5464 return BAD_VALUE;
5465 }
5466 mNeedSensorRestart = false;
5467 pthread_mutex_lock(&mMutex);
5468 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005469 if(mResetInstantAEC) {
5470 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5471 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5472 mResetInstantAEC = false;
5473 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005474 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005475 if (request->input_buffer->acquire_fence != -1) {
5476 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5477 close(request->input_buffer->acquire_fence);
5478 if (rc != OK) {
5479 LOGE("input buffer sync wait failed %d", rc);
5480 pthread_mutex_unlock(&mMutex);
5481 return rc;
5482 }
5483 }
5484 }
5485
5486 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5487 mLastCustIntentFrmNum = frameNumber;
5488 }
5489 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005491 pendingRequestIterator latestRequest;
5492 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005493 pendingRequest.num_buffers = depthRequestPresent ?
5494 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005495 pendingRequest.request_id = request_id;
5496 pendingRequest.blob_request = blob_request;
5497 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005498 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005499 if (request->input_buffer) {
5500 pendingRequest.input_buffer =
5501 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5502 *(pendingRequest.input_buffer) = *(request->input_buffer);
5503 pInputBuffer = pendingRequest.input_buffer;
5504 } else {
5505 pendingRequest.input_buffer = NULL;
5506 pInputBuffer = NULL;
5507 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005508 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005509
5510 pendingRequest.pipeline_depth = 0;
5511 pendingRequest.partial_result_cnt = 0;
5512 extractJpegMetadata(mCurJpegMeta, request);
5513 pendingRequest.jpegMetadata = mCurJpegMeta;
5514 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005515 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005516 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005517 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005518 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5519 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005520
Samuel Ha68ba5172016-12-15 18:41:12 -08005521 /* DevCamDebug metadata processCaptureRequest */
5522 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5523 mDevCamDebugMetaEnable =
5524 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5525 }
5526 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5527 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005528
5529 //extract CAC info
5530 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5531 mCacMode =
5532 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5533 }
5534 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005535 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005536 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5537 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005538
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005539 // extract enableZsl info
5540 if (gExposeEnableZslKey) {
5541 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5542 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5543 mZslEnabled = pendingRequest.enableZsl;
5544 } else {
5545 pendingRequest.enableZsl = mZslEnabled;
5546 }
5547 }
5548
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 PendingBuffersInRequest bufsForCurRequest;
5550 bufsForCurRequest.frame_number = frameNumber;
5551 // Mark current timestamp for the new request
5552 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005555 if (hdrPlusRequest) {
5556 // Save settings for this request.
5557 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5558 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5559
5560 // Add to pending HDR+ request queue.
5561 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5562 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5563
5564 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5565 }
5566
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005568 if ((request->output_buffers[i].stream->data_space ==
5569 HAL_DATASPACE_DEPTH) &&
5570 (HAL_PIXEL_FORMAT_BLOB ==
5571 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005572 continue;
5573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005574 RequestedBufferInfo requestedBuf;
5575 memset(&requestedBuf, 0, sizeof(requestedBuf));
5576 requestedBuf.stream = request->output_buffers[i].stream;
5577 requestedBuf.buffer = NULL;
5578 pendingRequest.buffers.push_back(requestedBuf);
5579
5580 // Add to buffer handle the pending buffers list
5581 PendingBufferInfo bufferInfo;
5582 bufferInfo.buffer = request->output_buffers[i].buffer;
5583 bufferInfo.stream = request->output_buffers[i].stream;
5584 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5585 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5586 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5587 frameNumber, bufferInfo.buffer,
5588 channel->getStreamTypeMask(), bufferInfo.stream->format);
5589 }
5590 // Add this request packet into mPendingBuffersMap
5591 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5592 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5593 mPendingBuffersMap.get_num_overall_buffers());
5594
5595 latestRequest = mPendingRequestsList.insert(
5596 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005597
5598 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5599 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005600 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005601 for (size_t i = 0; i < request->num_output_buffers; i++) {
5602 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5603 }
5604
Thierry Strudel3d639192016-09-09 11:52:26 -07005605 if(mFlush) {
5606 LOGI("mFlush is true");
5607 pthread_mutex_unlock(&mMutex);
5608 return NO_ERROR;
5609 }
5610
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005611 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5612 // channel.
5613 if (!hdrPlusRequest) {
5614 int indexUsed;
5615 // Notify metadata channel we receive a request
5616 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618 if(request->input_buffer != NULL){
5619 LOGD("Input request, frame_number %d", frameNumber);
5620 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5621 if (NO_ERROR != rc) {
5622 LOGE("fail to set reproc parameters");
5623 pthread_mutex_unlock(&mMutex);
5624 return rc;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 }
5627
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005628 // Call request on other streams
5629 uint32_t streams_need_metadata = 0;
5630 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5631 for (size_t i = 0; i < request->num_output_buffers; i++) {
5632 const camera3_stream_buffer_t& output = request->output_buffers[i];
5633 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5634
5635 if (channel == NULL) {
5636 LOGW("invalid channel pointer for stream");
5637 continue;
5638 }
5639
5640 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5641 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5642 output.buffer, request->input_buffer, frameNumber);
5643 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005645 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5646 if (rc < 0) {
5647 LOGE("Fail to request on picture channel");
5648 pthread_mutex_unlock(&mMutex);
5649 return rc;
5650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005652 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5653 assert(NULL != mDepthChannel);
5654 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005655
Emilian Peev7650c122017-01-19 08:24:33 -08005656 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5657 if (rc < 0) {
5658 LOGE("Fail to map on depth buffer");
5659 pthread_mutex_unlock(&mMutex);
5660 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005661 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005662 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005663 } else {
5664 LOGD("snapshot request with buffer %p, frame_number %d",
5665 output.buffer, frameNumber);
5666 if (!request->settings) {
5667 rc = channel->request(output.buffer, frameNumber,
5668 NULL, mPrevParameters, indexUsed);
5669 } else {
5670 rc = channel->request(output.buffer, frameNumber,
5671 NULL, mParameters, indexUsed);
5672 }
5673 if (rc < 0) {
5674 LOGE("Fail to request on picture channel");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005678
Emilian Peev7650c122017-01-19 08:24:33 -08005679 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5680 uint32_t j = 0;
5681 for (j = 0; j < streamsArray.num_streams; j++) {
5682 if (streamsArray.stream_request[j].streamID == streamId) {
5683 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5684 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5685 else
5686 streamsArray.stream_request[j].buf_index = indexUsed;
5687 break;
5688 }
5689 }
5690 if (j == streamsArray.num_streams) {
5691 LOGE("Did not find matching stream to update index");
5692 assert(0);
5693 }
5694
5695 pendingBufferIter->need_metadata = true;
5696 streams_need_metadata++;
5697 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005698 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005699 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5700 bool needMetadata = false;
5701 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5702 rc = yuvChannel->request(output.buffer, frameNumber,
5703 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5704 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005705 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005706 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005710
5711 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5712 uint32_t j = 0;
5713 for (j = 0; j < streamsArray.num_streams; j++) {
5714 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005715 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5716 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5717 else
5718 streamsArray.stream_request[j].buf_index = indexUsed;
5719 break;
5720 }
5721 }
5722 if (j == streamsArray.num_streams) {
5723 LOGE("Did not find matching stream to update index");
5724 assert(0);
5725 }
5726
5727 pendingBufferIter->need_metadata = needMetadata;
5728 if (needMetadata)
5729 streams_need_metadata += 1;
5730 LOGD("calling YUV channel request, need_metadata is %d",
5731 needMetadata);
5732 } else {
5733 LOGD("request with buffer %p, frame_number %d",
5734 output.buffer, frameNumber);
5735
5736 rc = channel->request(output.buffer, frameNumber, indexUsed);
5737
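                    // Record which backend buffer index this stream consumed so it can
                    // be carried in the request parameters; constrained high-speed mode
                    // uses the free-running index (CAM_FREERUN_IDX) instead.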
5738 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5739 uint32_t j = 0;
5740 for (j = 0; j < streamsArray.num_streams; j++) {
5741 if (streamsArray.stream_request[j].streamID == streamId) {
5742 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5743 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5744 else
5745 streamsArray.stream_request[j].buf_index = indexUsed;
5746 break;
5747 }
5748 }
5749 if (j == streamsArray.num_streams) {
5750 LOGE("Did not find matching stream to update index");
5751 assert(0);
5752 }
5753
5754 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5755 && mBatchSize) {
5756 mToBeQueuedVidBufs++;
5757 if (mToBeQueuedVidBufs == mBatchSize) {
5758 channel->queueBatchBuf();
5759 }
5760 }
5761 if (rc < 0) {
5762 LOGE("request failed");
5763 pthread_mutex_unlock(&mMutex);
5764 return rc;
5765 }
5766 }
5767 pendingBufferIter++;
5768 }
5769
5770 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5771 itr++) {
5772 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5773
5774 if (channel == NULL) {
5775 LOGE("invalid channel pointer for stream");
5776 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005777 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 return BAD_VALUE;
5779 }
5780
5781 InternalRequest requestedStream;
5782 requestedStream = (*itr);
5783
5784
5785 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5786 LOGD("snapshot request internally input buffer %p, frame_number %d",
5787 request->input_buffer, frameNumber);
5788 if(request->input_buffer != NULL){
5789 rc = channel->request(NULL, frameNumber,
5790 pInputBuffer, &mReprocMeta, indexUsed, true,
5791 requestedStream.meteringOnly);
5792 if (rc < 0) {
5793 LOGE("Fail to request on picture channel");
5794 pthread_mutex_unlock(&mMutex);
5795 return rc;
5796 }
5797 } else {
5798 LOGD("snapshot request with frame_number %d", frameNumber);
5799 if (!request->settings) {
5800 rc = channel->request(NULL, frameNumber,
5801 NULL, mPrevParameters, indexUsed, true,
5802 requestedStream.meteringOnly);
5803 } else {
5804 rc = channel->request(NULL, frameNumber,
5805 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5806 }
5807 if (rc < 0) {
5808 LOGE("Fail to request on picture channel");
5809 pthread_mutex_unlock(&mMutex);
5810 return rc;
5811 }
5812
5813 if ((*itr).meteringOnly != 1) {
5814 requestedStream.need_metadata = 1;
5815 streams_need_metadata++;
5816 }
5817 }
5818
5819 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5820 uint32_t j = 0;
5821 for (j = 0; j < streamsArray.num_streams; j++) {
5822 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005823 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5824 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5825 else
5826 streamsArray.stream_request[j].buf_index = indexUsed;
5827 break;
5828 }
5829 }
5830 if (j == streamsArray.num_streams) {
5831 LOGE("Did not find matching stream to update index");
5832 assert(0);
5833 }
5834
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005835 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005837 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005838 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005839 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005840 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005841 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005842 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005843
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005844 //If 2 streams have need_metadata set to true, fail the request, unless
5845 //we copy/reference count the metadata buffer
5846 if (streams_need_metadata > 1) {
5847 LOGE("not supporting request in which two streams require"
5848 " 2 HAL metadata for reprocessing");
5849 pthread_mutex_unlock(&mMutex);
5850 return -EINVAL;
5851 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005852
Emilian Peev656e4fa2017-06-02 16:47:04 +01005853 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5854 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5855 if (depthRequestPresent && mDepthChannel) {
5856 if (request->settings) {
5857 camera_metadata_ro_entry entry;
5858 if (find_camera_metadata_ro_entry(request->settings,
5859 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5860 if (entry.data.u8[0]) {
5861 pdafEnable = CAM_PD_DATA_ENABLED;
5862 } else {
5863 pdafEnable = CAM_PD_DATA_SKIP;
5864 }
5865 mDepthCloudMode = pdafEnable;
5866 } else {
5867 pdafEnable = mDepthCloudMode;
5868 }
5869 } else {
5870 pdafEnable = mDepthCloudMode;
5871 }
5872 }
5873
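    // PD (phase-detect) data policy derived above: disabled when no depth channel is
    // configured, skipped by default when one is, and enabled only when a depth request
    // is present and the app explicitly set NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE
    // (the last explicit choice is cached in mDepthCloudMode and reused otherwise).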
Emilian Peev7650c122017-01-19 08:24:33 -08005874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5875 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5876 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5877 pthread_mutex_unlock(&mMutex);
5878 return BAD_VALUE;
5879 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005880
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005881 if (request->input_buffer == NULL) {
5882 /* Set the parameters to backend:
5883 * - For every request in NORMAL MODE
5884 * - For every request in HFR mode during preview only case
5885 * - Once every batch in HFR mode during video recording
5886 */
5887 if (!mBatchSize ||
5888 (mBatchSize && !isVidBufRequested) ||
5889 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5890 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5891 mBatchSize, isVidBufRequested,
5892 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005893
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
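                // Merge this request's streams into mBatchedStreamsArray, skipping
                // duplicates, so that a single set_parms call covers every stream
                // touched anywhere in the batch.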
5895 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5896 uint32_t m = 0;
5897 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5898 if (streamsArray.stream_request[k].streamID ==
5899 mBatchedStreamsArray.stream_request[m].streamID)
5900 break;
5901 }
5902 if (m == mBatchedStreamsArray.num_streams) {
5903 mBatchedStreamsArray.stream_request\
5904 [mBatchedStreamsArray.num_streams].streamID =
5905 streamsArray.stream_request[k].streamID;
5906 mBatchedStreamsArray.stream_request\
5907 [mBatchedStreamsArray.num_streams].buf_index =
5908 streamsArray.stream_request[k].buf_index;
5909 mBatchedStreamsArray.num_streams =
5910 mBatchedStreamsArray.num_streams + 1;
5911 }
5912 }
5913 streamsArray = mBatchedStreamsArray;
5914 }
5915 /* Update stream id of all the requested buffers */
5916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5917 streamsArray)) {
5918 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005919 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005920 return BAD_VALUE;
5921 }
5922
5923 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5924 mParameters);
5925 if (rc < 0) {
5926 LOGE("set_parms failed");
5927 }
5928 /* reset to zero because the batch is queued */
5929 mToBeQueuedVidBufs = 0;
5930 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5931 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5932 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005933 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5934 uint32_t m = 0;
5935 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5936 if (streamsArray.stream_request[k].streamID ==
5937 mBatchedStreamsArray.stream_request[m].streamID)
5938 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005939 }
5940 if (m == mBatchedStreamsArray.num_streams) {
5941 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5942 streamID = streamsArray.stream_request[k].streamID;
5943 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5944 buf_index = streamsArray.stream_request[k].buf_index;
5945 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5946 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005947 }
5948 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005949 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005950
5951 // Start all streams after the first setting is sent, so that the
5952 // setting can be applied sooner: (0 + apply_delay)th frame.
5953 if (mState == CONFIGURED && mChannelHandle) {
5954 //Then start them.
5955 LOGH("Start META Channel");
5956 rc = mMetadataChannel->start();
5957 if (rc < 0) {
5958 LOGE("META channel start failed");
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
5962
5963 if (mAnalysisChannel) {
5964 rc = mAnalysisChannel->start();
5965 if (rc < 0) {
5966 LOGE("Analysis channel start failed");
5967 mMetadataChannel->stop();
5968 pthread_mutex_unlock(&mMutex);
5969 return rc;
5970 }
5971 }
5972
5973 if (mSupportChannel) {
5974 rc = mSupportChannel->start();
5975 if (rc < 0) {
5976 LOGE("Support channel start failed");
5977 mMetadataChannel->stop();
5978 /* Although support and analysis are mutually exclusive today,
5979 adding it in any case for future proofing */
5980 if (mAnalysisChannel) {
5981 mAnalysisChannel->stop();
5982 }
5983 pthread_mutex_unlock(&mMutex);
5984 return rc;
5985 }
5986 }
5987 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5988 it != mStreamInfo.end(); it++) {
5989 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5990 LOGH("Start Processing Channel mask=%d",
5991 channel->getStreamTypeMask());
5992 rc = channel->start();
5993 if (rc < 0) {
5994 LOGE("channel start failed");
5995 pthread_mutex_unlock(&mMutex);
5996 return rc;
5997 }
5998 }
5999
6000 if (mRawDumpChannel) {
6001 LOGD("Starting raw dump stream");
6002 rc = mRawDumpChannel->start();
6003 if (rc != NO_ERROR) {
6004 LOGE("Error Starting Raw Dump Channel");
6005 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6006 it != mStreamInfo.end(); it++) {
6007 QCamera3Channel *channel =
6008 (QCamera3Channel *)(*it)->stream->priv;
6009 LOGH("Stopping Processing Channel mask=%d",
6010 channel->getStreamTypeMask());
6011 channel->stop();
6012 }
6013 if (mSupportChannel)
6014 mSupportChannel->stop();
6015 if (mAnalysisChannel) {
6016 mAnalysisChannel->stop();
6017 }
6018 mMetadataChannel->stop();
6019 pthread_mutex_unlock(&mMutex);
6020 return rc;
6021 }
6022 }
6023
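                // Stream-on sequence from here: (1) start_channel() configures the
                // pipeline with sensor streaming deferred, (2) Easel/MIPI is brought
                // up if present, (3) start_sensor_streaming() actually starts the sensor.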
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006024 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006025 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006026 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006027 if (rc != NO_ERROR) {
6028 LOGE("start_channel failed %d", rc);
6029 pthread_mutex_unlock(&mMutex);
6030 return rc;
6031 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006032
6033 {
6034 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006035 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006036
6037 // Now that sensor mode should have been selected, get the selected sensor mode
6038 // info.
6039 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6040 getCurrentSensorModeInfo(mSensorModeInfo);
6041
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006042 if (EaselManagerClientOpened) {
6043 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006044 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6045 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006046 if (rc != OK) {
6047 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6048 mCameraId, mSensorModeInfo.op_pixel_clk);
6049 pthread_mutex_unlock(&mMutex);
6050 return rc;
6051 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006052 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006053 }
6054 }
6055
6056 // Start sensor streaming.
6057 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6058 mChannelHandle);
6059 if (rc != NO_ERROR) {
6060 LOGE("start_sensor_stream_on failed %d", rc);
6061 pthread_mutex_unlock(&mMutex);
6062 return rc;
6063 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006064 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 }
6067
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006068 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006069 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006070 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006071 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006072 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6073 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6074 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6075 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006076
6077 if (isSessionHdrPlusModeCompatible()) {
6078 rc = enableHdrPlusModeLocked();
6079 if (rc != OK) {
6080 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006084 }
6085
6086 mFirstPreviewIntentSeen = true;
6087 }
6088 }
6089
Thierry Strudel3d639192016-09-09 11:52:26 -07006090 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6091
6092 mState = STARTED;
6093 // Added a timed condition wait
6094 struct timespec ts;
6095 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006096 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006097 if (rc < 0) {
6098 isValidTimeout = 0;
6099 LOGE("Error reading the real time clock!!");
6100 }
6101 else {
6102 // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006103 int64_t timeout = 5;
6104 {
6105 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6106 // If there is a pending HDR+ request, the following requests may be blocked until the
6107 // HDR+ request is done. So allow a longer timeout.
6108 if (mHdrPlusPendingRequests.size() > 0) {
6109 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6110 }
6111 }
6112 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006113 }
6114 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006115 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006116 (mState != ERROR) && (mState != DEINIT)) {
6117 if (!isValidTimeout) {
6118 LOGD("Blocking on conditional wait");
6119 pthread_cond_wait(&mRequestCond, &mMutex);
6120 }
6121 else {
6122 LOGD("Blocking on timed conditional wait");
6123 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6124 if (rc == ETIMEDOUT) {
6125 rc = -ENODEV;
6126 LOGE("Unblocked on timeout!!!!");
6127 break;
6128 }
6129 }
6130 LOGD("Unblocked");
6131 if (mWokenUpByDaemon) {
6132 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006133 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 break;
6135 }
6136 }
6137 pthread_mutex_unlock(&mMutex);
6138
6139 return rc;
6140}
6141
6142/*===========================================================================
6143 * FUNCTION : dump
6144 *
6145 * DESCRIPTION: Dumps HAL3 state (pending requests, pending buffers and
6146 * pending frame drops) to the given file descriptor
6147 *
6148 * PARAMETERS :
6149 * @fd : file descriptor to write the dump into
6150 * RETURN : None
6151 *==========================================================================*/
6152void QCamera3HardwareInterface::dump(int fd)
6153{
6154 pthread_mutex_lock(&mMutex);
6155 dprintf(fd, "\n Camera HAL3 information Begin \n");
6156
6157 dprintf(fd, "\nNumber of pending requests: %zu \n",
6158 mPendingRequestsList.size());
6159 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6160 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6161 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6162 for(pendingRequestIterator i = mPendingRequestsList.begin();
6163 i != mPendingRequestsList.end(); i++) {
6164 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6165 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6166 i->input_buffer);
6167 }
6168 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6169 mPendingBuffersMap.get_num_overall_buffers());
6170 dprintf(fd, "-------+------------------\n");
6171 dprintf(fd, " Frame | Stream type mask \n");
6172 dprintf(fd, "-------+------------------\n");
6173 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6174 for(auto &j : req.mPendingBufferList) {
6175 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6176 dprintf(fd, " %5d | %11d \n",
6177 req.frame_number, channel->getStreamTypeMask());
6178 }
6179 }
6180 dprintf(fd, "-------+------------------\n");
6181
6182 dprintf(fd, "\nPending frame drop list: %zu\n",
6183 mPendingFrameDropList.size());
6184 dprintf(fd, "-------+-----------\n");
6185 dprintf(fd, " Frame | Stream ID \n");
6186 dprintf(fd, "-------+-----------\n");
6187 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6188 i != mPendingFrameDropList.end(); i++) {
6189 dprintf(fd, " %5d | %9d \n",
6190 i->frame_number, i->stream_ID);
6191 }
6192 dprintf(fd, "-------+-----------\n");
6193
6194 dprintf(fd, "\n Camera HAL3 information End \n");
6195
6196 /* use dumpsys media.camera as trigger to send update debug level event */
6197 mUpdateDebugLevel = true;
6198 pthread_mutex_unlock(&mMutex);
6199 return;
6200}
6201
6202/*===========================================================================
6203 * FUNCTION : flush
6204 *
6205 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6206 * conditionally restarts channels
6207 *
6208 * PARAMETERS :
6209 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006210 * @ stopChannelImmediately: stop the channel immediately. This should be used
6211 * when the device has encountered an error and MIPI
6212 * may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006213 *
6214 * RETURN :
6215 * 0 on success
6216 * Error code on failure
6217 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006218int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006219{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006220 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 int32_t rc = NO_ERROR;
6222
6223 LOGD("Unblocking Process Capture Request");
6224 pthread_mutex_lock(&mMutex);
6225 mFlush = true;
6226 pthread_mutex_unlock(&mMutex);
6227
6228 rc = stopAllChannels();
6229 // unlink of dualcam
6230 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006231 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6232 &m_pDualCamCmdPtr->bundle_info;
6233 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006234 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6235 pthread_mutex_lock(&gCamLock);
6236
6237 if (mIsMainCamera == 1) {
6238 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6239 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006240 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 // related session id should be session id of linked session
6242 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6243 } else {
6244 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6245 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006246 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6248 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006249 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006250 pthread_mutex_unlock(&gCamLock);
6251
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006252 rc = mCameraHandle->ops->set_dual_cam_cmd(
6253 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006254 if (rc < 0) {
6255 LOGE("Dualcam: Unlink failed, but still proceed to close");
6256 }
6257 }
6258
6259 if (rc < 0) {
6260 LOGE("stopAllChannels failed");
6261 return rc;
6262 }
6263 if (mChannelHandle) {
6264 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006265 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006266 }
6267
6268 // Reset bundle info
6269 rc = setBundleInfo();
6270 if (rc < 0) {
6271 LOGE("setBundleInfo failed %d", rc);
6272 return rc;
6273 }
6274
6275 // Mutex Lock
6276 pthread_mutex_lock(&mMutex);
6277
6278 // Unblock process_capture_request
6279 mPendingLiveRequest = 0;
6280 pthread_cond_signal(&mRequestCond);
6281
6282 rc = notifyErrorForPendingRequests();
6283 if (rc < 0) {
6284 LOGE("notifyErrorForPendingRequests failed");
6285 pthread_mutex_unlock(&mMutex);
6286 return rc;
6287 }
6288
6289 mFlush = false;
6290
6291 // Start the Streams/Channels
6292 if (restartChannels) {
6293 rc = startAllChannels();
6294 if (rc < 0) {
6295 LOGE("startAllChannels failed");
6296 pthread_mutex_unlock(&mMutex);
6297 return rc;
6298 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006299 if (mChannelHandle) {
6300 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006301 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006302 if (rc < 0) {
6303 LOGE("start_channel failed");
6304 pthread_mutex_unlock(&mMutex);
6305 return rc;
6306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006307 }
6308 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006309 pthread_mutex_unlock(&mMutex);
6310
6311 return 0;
6312}
6313
6314/*===========================================================================
6315 * FUNCTION : flushPerf
6316 *
6317 * DESCRIPTION: This is the performance optimization version of flush that does
6318 * not use stream off; rather, it flushes the system
6319 *
6320 * PARAMETERS :
6321 *
6322 *
6323 * RETURN : 0 : success
6324 * -EINVAL: input is malformed (device is not valid)
6325 * -ENODEV: if the device has encountered a serious error
6326 *==========================================================================*/
6327int QCamera3HardwareInterface::flushPerf()
6328{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006329 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 int32_t rc = 0;
6331 struct timespec timeout;
6332 bool timed_wait = false;
6333
6334 pthread_mutex_lock(&mMutex);
6335 mFlushPerf = true;
6336 mPendingBuffersMap.numPendingBufsAtFlush =
6337 mPendingBuffersMap.get_num_overall_buffers();
6338 LOGD("Calling flush. Wait for %d buffers to return",
6339 mPendingBuffersMap.numPendingBufsAtFlush);
6340
6341 /* send the flush event to the backend */
6342 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6343 if (rc < 0) {
6344 LOGE("Error in flush: IOCTL failure");
6345 mFlushPerf = false;
6346 pthread_mutex_unlock(&mMutex);
6347 return -ENODEV;
6348 }
6349
6350 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6351 LOGD("No pending buffers in HAL, return flush");
6352 mFlushPerf = false;
6353 pthread_mutex_unlock(&mMutex);
6354 return rc;
6355 }
6356
6357 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006358 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006359 if (rc < 0) {
6360 LOGE("Error reading the real time clock, cannot use timed wait");
6361 } else {
6362 timeout.tv_sec += FLUSH_TIMEOUT;
6363 timed_wait = true;
6364 }
6365
6366 //Block on conditional variable
6367 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6368 LOGD("Waiting on mBuffersCond");
6369 if (!timed_wait) {
6370 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6371 if (rc != 0) {
6372 LOGE("pthread_cond_wait failed due to rc = %s",
6373 strerror(rc));
6374 break;
6375 }
6376 } else {
6377 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6378 if (rc != 0) {
6379 LOGE("pthread_cond_timedwait failed due to rc = %s",
6380 strerror(rc));
6381 break;
6382 }
6383 }
6384 }
6385 if (rc != 0) {
6386 mFlushPerf = false;
6387 pthread_mutex_unlock(&mMutex);
6388 return -ENODEV;
6389 }
6390
6391 LOGD("Received buffers, now safe to return them");
6392
6393 //make sure the channels handle flush
6394 //currently only required for the picture channel to release snapshot resources
6395 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6396 it != mStreamInfo.end(); it++) {
6397 QCamera3Channel *channel = (*it)->channel;
6398 if (channel) {
6399 rc = channel->flush();
6400 if (rc) {
6401 LOGE("Flushing the channels failed with error %d", rc);
6402 // even though the channel flush failed we need to continue and
6403 // return the buffers we have to the framework, however the return
6404 // value will be an error
6405 rc = -ENODEV;
6406 }
6407 }
6408 }
6409
6410 /* notify the frameworks and send errored results */
6411 rc = notifyErrorForPendingRequests();
6412 if (rc < 0) {
6413 LOGE("notifyErrorForPendingRequests failed");
6414 pthread_mutex_unlock(&mMutex);
6415 return rc;
6416 }
6417
6418 //unblock process_capture_request
6419 mPendingLiveRequest = 0;
6420 unblockRequestIfNecessary();
6421
6422 mFlushPerf = false;
6423 pthread_mutex_unlock(&mMutex);
6424 LOGD ("Flush Operation complete. rc = %d", rc);
6425 return rc;
6426}
6427
6428/*===========================================================================
6429 * FUNCTION : handleCameraDeviceError
6430 *
6431 * DESCRIPTION: This function calls internal flush, notifies the error to the
6432 * framework, and updates the state variable.
6433 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006434 * PARAMETERS :
6435 * @stopChannelImmediately : stop channels immediately without waiting for
6436 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006437 *
6438 * RETURN : NO_ERROR on Success
6439 * Error code on failure
6440 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006441int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006442{
6443 int32_t rc = NO_ERROR;
6444
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006445 {
6446 Mutex::Autolock lock(mFlushLock);
6447 pthread_mutex_lock(&mMutex);
6448 if (mState != ERROR) {
6449 //if mState != ERROR, nothing to be done
6450 pthread_mutex_unlock(&mMutex);
6451 return NO_ERROR;
6452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006453 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006454
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006455 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006456 if (NO_ERROR != rc) {
6457 LOGE("internal flush to handle mState = ERROR failed");
6458 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006459
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006460 pthread_mutex_lock(&mMutex);
6461 mState = DEINIT;
6462 pthread_mutex_unlock(&mMutex);
6463 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006464
6465 camera3_notify_msg_t notify_msg;
6466 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6467 notify_msg.type = CAMERA3_MSG_ERROR;
6468 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6469 notify_msg.message.error.error_stream = NULL;
6470 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006471 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006472
6473 return rc;
6474}
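/* Per the camera3 HAL contract (hardware/camera3.h), CAMERA3_MSG_ERROR_DEVICE
 * signals a serious failure after which no further frames will be produced and
 * the framework is expected to treat the device as closed; that is why mState
 * is moved to DEINIT before orchestrateNotify() is called above. */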
6475
6476/*===========================================================================
6477 * FUNCTION : captureResultCb
6478 *
6479 * DESCRIPTION: Callback handler for all capture results
6480 * (stream buffers as well as metadata)
6481 *
6482 * PARAMETERS :
6483 * @metadata : metadata information
6484 * @buffer : actual gralloc buffer to be returned to frameworks.
6485 * NULL if metadata.
 * @frame_number : frame number of the request this buffer belongs to
 * @isInputBuffer : true if this callback is for the input buffer of a
 * reprocess request
6486 *
6487 * RETURN : NONE
6488 *==========================================================================*/
6489void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6490 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6491{
6492 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006493 pthread_mutex_lock(&mMutex);
6494 uint8_t batchSize = mBatchSize;
6495 pthread_mutex_unlock(&mMutex);
6496 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006497 handleBatchMetadata(metadata_buf,
6498 true /* free_and_bufdone_meta_buf */);
6499 } else { /* mBatchSize = 0 */
6500 hdrPlusPerfLock(metadata_buf);
6501 pthread_mutex_lock(&mMutex);
6502 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006503 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006504 true /* last urgent frame of batch metadata */,
6505 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006506 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006507 pthread_mutex_unlock(&mMutex);
6508 }
6509 } else if (isInputBuffer) {
6510 pthread_mutex_lock(&mMutex);
6511 handleInputBufferWithLock(frame_number);
6512 pthread_mutex_unlock(&mMutex);
6513 } else {
6514 pthread_mutex_lock(&mMutex);
6515 handleBufferWithLock(buffer, frame_number);
6516 pthread_mutex_unlock(&mMutex);
6517 }
6518 return;
6519}
6520
6521/*===========================================================================
6522 * FUNCTION : getReprocessibleOutputStreamId
6523 *
6524 * DESCRIPTION: Get source output stream id for the input reprocess stream
6525 * based on size and format, which would be the largest
6526 * output stream if an input stream exists.
6527 *
6528 * PARAMETERS :
6529 * @id : return the stream id if found
6530 *
6531 * RETURN : int32_t type of status
6532 * NO_ERROR -- success
6533 * non-zero failure code
6534 *==========================================================================*/
6535int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6536{
6537 /* Check for an output or bidirectional stream with the same size and format
6538 as the input stream, and return that stream's id */
6539 if ((mInputStreamInfo.dim.width > 0) &&
6540 (mInputStreamInfo.dim.height > 0)) {
6541 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6542 it != mStreamInfo.end(); it++) {
6543
6544 camera3_stream_t *stream = (*it)->stream;
6545 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6546 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6547 (stream->format == mInputStreamInfo.format)) {
6548 // Usage flag for an input stream and the source output stream
6549 // may be different.
6550 LOGD("Found reprocessible output stream! %p", *it);
6551 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6552 stream->usage, mInputStreamInfo.usage);
6553
6554 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6555 if (channel != NULL && channel->mStreams[0]) {
6556 id = channel->mStreams[0]->getMyServerID();
6557 return NO_ERROR;
6558 }
6559 }
6560 }
6561 } else {
6562 LOGD("No input stream, so no reprocessible output stream");
6563 }
6564 return NAME_NOT_FOUND;
6565}
6566
6567/*===========================================================================
6568 * FUNCTION : lookupFwkName
6569 *
6570 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6571 * make sure the parameter is correctly propagated
6572 *
6573 * PARAMETERS :
6574 * @arr : map between the two enums
6575 * @len : len of the map
6576 * @hal_name : name of the hal_parm to map
6577 *
6578 * RETURN : int type of status
6579 * fwk_name -- success
6580 * non-zero failure code (NAME_NOT_FOUND)
6581 *==========================================================================*/
6582template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6583 size_t len, halType hal_name)
6584{
6585
6586 for (size_t i = 0; i < len; i++) {
6587 if (arr[i].hal_name == hal_name) {
6588 return arr[i].fwk_name;
6589 }
6590 }
6591
6592 /* Not being able to find a matching framework type is not necessarily
6593 * an error. This happens when mm-camera supports more attributes
6594 * than the framework does */
6595 LOGH("Cannot find matching framework type");
6596 return NAME_NOT_FOUND;
6597}
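/* Typical usage of lookupFwkName(), mirroring how the map tables are consumed
 * later in this file (e.g. translating the backend scene mode into the
 * framework enum before publishing it in the result metadata):
 *
 *   int val = lookupFwkName(SCENE_MODES_MAP,
 *           METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
 *   if (NAME_NOT_FOUND != val) {
 *       uint8_t fwkSceneMode = (uint8_t)val;
 *       camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
 *   }
 */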
6598
6599/*===========================================================================
6600 * FUNCTION : lookupHalName
6601 *
6602 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6603 * make sure the parameter is correctly propagated
6604 *
6605 * PARAMETERS :
6606 * @arr : map between the two enums
6607 * @len : len of the map
6608 * @fwk_name : fwk enum value to map to the HAL enum
6609 *
6610 * RETURN : int32_t type of status
6611 * hal_name -- success
6612 * non-zero failure code (NAME_NOT_FOUND)
6613 *==========================================================================*/
6614template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6615 size_t len, fwkType fwk_name)
6616{
6617 for (size_t i = 0; i < len; i++) {
6618 if (arr[i].fwk_name == fwk_name) {
6619 return arr[i].hal_name;
6620 }
6621 }
6622
6623 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6624 return NAME_NOT_FOUND;
6625}
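/* A sketch of the inverse lookup (framework enum -> HAL enum), using one of the
 * same map tables; the actual call sites live in the request-translation path
 * and may differ in detail:
 *
 *   int32_t halFlashMode = lookupHalName(FLASH_MODES_MAP,
 *           METADATA_MAP_SIZE(FLASH_MODES_MAP), fwk_flashMode);
 *   if (NAME_NOT_FOUND != halFlashMode) {
 *       // program halFlashMode into the backend parameter buffer
 *   }
 */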
6626
6627/*===========================================================================
6628 * FUNCTION : lookupProp
6629 *
6630 * DESCRIPTION: lookup a value by its name
6631 *
6632 * PARAMETERS :
6633 * @arr : map between the two enums
6634 * @len : size of the map
6635 * @name : name to be looked up
6636 *
6637 * RETURN : Value if found
6638 * CAM_CDS_MODE_MAX if not found
6639 *==========================================================================*/
6640template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6641 size_t len, const char *name)
6642{
6643 if (name) {
6644 for (size_t i = 0; i < len; i++) {
6645 if (!strcmp(arr[i].desc, name)) {
6646 return arr[i].val;
6647 }
6648 }
6649 }
6650 return CAM_CDS_MODE_MAX;
6651}
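/* A minimal sketch of how lookupProp() is meant to be used: map a system
 * property string onto a CDS mode. CDS_MAP and the property name below are
 * illustrative only; the real table/property names used by this HAL may differ.
 *
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP,
 *           METADATA_MAP_SIZE(CDS_MAP), prop);
 *   if (CAM_CDS_MODE_MAX == cds_mode) {
 *       // the property value did not match any known CDS mode
 *   }
 */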
6652
6653/*===========================================================================
6654 * FUNCTION   : translateFromHalMetadata
 *
6655 * DESCRIPTION: Translate metadata received from the HAL/backend into the
 *               framework (camera_metadata_t) result for this pending request
6656 *
6657 * PARAMETERS :
6658 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006659 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006660 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006661 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6662 * in a batch. Always true for non-batch mode.
 * @enableZsl : if non-NULL, the ZSL enable state to report for this request
Thierry Strudel3d639192016-09-09 11:52:26 -07006663 *
6664 * RETURN : camera_metadata_t*
6665 * metadata in a format specified by fwk
6666 *==========================================================================*/
6667camera_metadata_t*
6668QCamera3HardwareInterface::translateFromHalMetadata(
6669 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006670 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006671 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006672 bool lastMetadataInBatch,
6673 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006674{
6675 CameraMetadata camMetadata;
6676 camera_metadata_t *resultMetadata;
6677
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006678 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006679 /* In batch mode, if this is not the last metadata in the batch, populate only
6680 * SENSOR_TIMESTAMP; the timestamp is needed for the shutter notify calculation.
6681 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006682 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006683 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006684 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006685 }
6686
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006687 if (pendingRequest.jpegMetadata.entryCount())
6688 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006689
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006690 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6691 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6692 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6693 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6694 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006695 if (mBatchSize == 0) {
6696 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006697 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006699
Samuel Ha68ba5172016-12-15 18:41:12 -08006700 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6701 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006702 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006703 // DevCamDebug metadata translateFromHalMetadata AF
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6705 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6706 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6707 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6708 }
6709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6710 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6711 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6712 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6715 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6716 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6717 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6720 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6721 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6722 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6725 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6726 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6727 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6730 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6731 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6732 *DevCamDebug_af_monitor_pdaf_target_pos;
6733 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6734 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6737 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6738 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6739 *DevCamDebug_af_monitor_pdaf_confidence;
6740 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6741 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6744 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6745 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6746 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6747 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6750 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6752 *DevCamDebug_af_monitor_tof_target_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6754 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6757 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6758 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6759 *DevCamDebug_af_monitor_tof_confidence;
6760 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6761 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6762 }
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6764 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6765 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6766 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6767 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6768 }
6769 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6770 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6771 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6772 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6773 &fwk_DevCamDebug_af_monitor_type_select, 1);
6774 }
6775 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6776 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6777 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6778 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6779 &fwk_DevCamDebug_af_monitor_refocus, 1);
6780 }
6781 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6782 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6783 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6784 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6785 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6786 }
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6788 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6789 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6790 *DevCamDebug_af_search_pdaf_target_pos;
6791 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6792 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6795 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6796 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6797 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6798 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6801 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6802 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6803 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6804 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6805 }
6806 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6807 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6808 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6809 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6810 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6813 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6814 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6815 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6816 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6817 }
6818 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6819 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6820 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6821 *DevCamDebug_af_search_tof_target_pos;
6822 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6823 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6826 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6827 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6828 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6829 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6832 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6833 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6834 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6835 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6838 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6840 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6841 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6844 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6845 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6846 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6847 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6848 }
6849 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6850 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6851 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6852 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6853 &fwk_DevCamDebug_af_search_type_select, 1);
6854 }
6855 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6856 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6857 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6858 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6859 &fwk_DevCamDebug_af_search_next_pos, 1);
6860 }
6861 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6862 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6863 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6864 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6865 &fwk_DevCamDebug_af_search_target_pos, 1);
6866 }
6867 // DevCamDebug metadata translateFromHalMetadata AEC
6868 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6869 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6870 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6871 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6874 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6875 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6876 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6879 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6880 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6881 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6882 }
6883 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6884 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6885 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6886 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6889 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6890 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6891 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6892 }
6893 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6894 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6895 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6896 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6897 }
6898 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6899 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6900 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6901 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6904 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6905 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6906 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6907 }
Samuel Ha34229982017-02-17 13:51:11 -08006908 // DevCamDebug metadata translateFromHalMetadata zzHDR
6909 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6910 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6911 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6912 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6913 }
6914 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6915 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006916 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006917 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6918 }
6919 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6920 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6921 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6922 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6923 }
6924 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6925 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006926 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006927 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6928 }
6929 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6930 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6931 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6932 *DevCamDebug_aec_hdr_sensitivity_ratio;
6933 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6934 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6935 }
6936 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6937 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6938 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6939 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6940 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6941 }
6942 // DevCamDebug metadata translateFromHalMetadata ADRC
6943 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6944 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6945 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6946 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6947 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6948 }
6949 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6950 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6951 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6952 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6953 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6954 }
6955 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6956 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6957 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6958 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6959 }
6960 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6961 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6962 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6963 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6964 }
6965 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6966 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6967 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6968 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6969 }
6970 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6971 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6972 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6973 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6974 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006975 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6976 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6977 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6978 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6979 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6980 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6981 }
6982 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6983 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6984 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6985 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6986 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6987 }
6988 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6989 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6990 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6991 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6992 &fwk_DevCamDebug_aec_subject_motion, 1);
6993 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006994 // DevCamDebug metadata translateFromHalMetadata AWB
6995 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6996 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6997 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6998 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6999 }
7000 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7001 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7002 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7003 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7004 }
7005 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7006 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7007 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7008 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7009 }
7010 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7011 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7012 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7013 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7014 }
7015 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7016 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7017 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7018 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7019 }
7020 }
7021 // atrace_end(ATRACE_TAG_ALWAYS);
7022
Thierry Strudel3d639192016-09-09 11:52:26 -07007023 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7024 int64_t fwk_frame_number = *frame_number;
7025 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7026 }
7027
7028 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7029 int32_t fps_range[2];
7030 fps_range[0] = (int32_t)float_range->min_fps;
7031 fps_range[1] = (int32_t)float_range->max_fps;
7032 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7033 fps_range, 2);
7034 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7035 fps_range[0], fps_range[1]);
7036 }
7037
7038 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7039 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7040 }
7041
7042 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7043 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7044 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7045 *sceneMode);
7046 if (NAME_NOT_FOUND != val) {
7047 uint8_t fwkSceneMode = (uint8_t)val;
7048 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7049 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7050 fwkSceneMode);
7051 }
7052 }
7053
7054 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7055 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7056 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7057 }
7058
7059 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7060 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7061 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7062 }
7063
7064 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7065 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7066 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7067 }
7068
7069 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7070 CAM_INTF_META_EDGE_MODE, metadata) {
7071 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7072 }
7073
7074 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7075 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7076 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7077 }
7078
7079 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7080 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7081 }
7082
7083 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7084 if (0 <= *flashState) {
7085 uint8_t fwk_flashState = (uint8_t) *flashState;
7086 if (!gCamCapability[mCameraId]->flash_available) {
7087 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7088 }
7089 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7090 }
7091 }
7092
7093 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7094 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7095 if (NAME_NOT_FOUND != val) {
7096 uint8_t fwk_flashMode = (uint8_t)val;
7097 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7098 }
7099 }
7100
7101 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7102 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7103 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7104 }
7105
7106 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7107 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7108 }
7109
7110 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7111 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7112 }
7113
7114 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7115 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7116 }
7117
7118 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7119 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7120 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7121 }
7122
7123 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7124 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7125 LOGD("fwk_videoStab = %d", fwk_videoStab);
7126 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7127 } else {
7128 // Regardless of whether video stabilization is supported, CTS expects the EIS
7129 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7130 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7131 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007133 }
7134
7135 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7136 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7137 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7138 }
7139
7140 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7141 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7142 }
7143
Thierry Strudel3d639192016-09-09 11:52:26 -07007144 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7145 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007146 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007147
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007148 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7149 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007150
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007151 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007152 blackLevelAppliedPattern->cam_black_level[0],
7153 blackLevelAppliedPattern->cam_black_level[1],
7154 blackLevelAppliedPattern->cam_black_level[2],
7155 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007156 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7157 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007158
7159#ifndef USE_HAL_3_3
7160 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307161 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007162 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307163 fwk_blackLevelInd[0] /= 16.0;
7164 fwk_blackLevelInd[1] /= 16.0;
7165 fwk_blackLevelInd[2] /= 16.0;
7166 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007167 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7168 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007169#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007170 }
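    /* Worked example for the conversion above: black levels are carried at
     * 14-bit depth internally, so dividing by 2^(14-10) = 16 rescales them to
     * the sensor's 10-bit RAW space; e.g. an internal value of 1024 is
     * reported as 1024 / 16 = 64 in ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL. */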
7171
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007172#ifndef USE_HAL_3_3
7173 // Fixed whitelevel is used by ISP/Sensor
7174 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7175 &gCamCapability[mCameraId]->white_level, 1);
7176#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007177
7178 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7179 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7180 int32_t scalerCropRegion[4];
7181 scalerCropRegion[0] = hScalerCropRegion->left;
7182 scalerCropRegion[1] = hScalerCropRegion->top;
7183 scalerCropRegion[2] = hScalerCropRegion->width;
7184 scalerCropRegion[3] = hScalerCropRegion->height;
7185
7186 // Adjust crop region from sensor output coordinate system to active
7187 // array coordinate system.
7188 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7189 scalerCropRegion[2], scalerCropRegion[3]);
7190
7191 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7192 }
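    /* The toActiveArray() mapping above is, in essence, a rescale from the
     * sensor output coordinate system to the active pixel array coordinate
     * system. A sketch of the assumed transform (the actual
     * QCamera3CropRegionMapper implementation may also clamp to the active
     * array bounds):
     *
     *   active_x = sensor_x * activeArrayWidth  / sensorOutputWidth;
     *   active_y = sensor_y * activeArrayHeight / sensorOutputHeight;
     */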
7193
7194 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7195 LOGD("sensorExpTime = %lld", *sensorExpTime);
7196 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7197 }
7198
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007199 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7200 LOGD("expTimeBoost = %f", *expTimeBoost);
7201 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7202 }
7203
Thierry Strudel3d639192016-09-09 11:52:26 -07007204 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7205 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7206 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7207 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7208 }
7209
7210 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7211 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7212 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7213 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7214 sensorRollingShutterSkew, 1);
7215 }
7216
7217 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7218 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7219 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7220
7221 //calculate the noise profile based on sensitivity
7222 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7223 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7224 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7225 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7226 noise_profile[i] = noise_profile_S;
7227 noise_profile[i+1] = noise_profile_O;
7228 }
7229 LOGD("noise model entry (S, O) is (%f, %f)",
7230 noise_profile_S, noise_profile_O);
7231 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7232 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7233 }
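    /* The (S, O) pair above follows the ANDROID_SENSOR_NOISE_PROFILE model,
     * where for a normalized pixel value x in [0, 1] the noise standard
     * deviation is approximately sqrt(S * x + O); computeNoiseModelEntryS/O
     * are expected to evaluate the calibrated noise model at the current
     * sensitivity, and the same pair is repeated for every color channel. */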
7234
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007235#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007236 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007237 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007238 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007239 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007240 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7241 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7242 }
7243 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244#endif
7245
Thierry Strudel3d639192016-09-09 11:52:26 -07007246 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7247 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7248 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7249 }
7250
7251 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7252 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7253 *faceDetectMode);
7254 if (NAME_NOT_FOUND != val) {
7255 uint8_t fwk_faceDetectMode = (uint8_t)val;
7256 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7257
7258 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7259 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7260 CAM_INTF_META_FACE_DETECTION, metadata) {
7261 uint8_t numFaces = MIN(
7262 faceDetectionInfo->num_faces_detected, MAX_ROI);
7263 int32_t faceIds[MAX_ROI];
7264 uint8_t faceScores[MAX_ROI];
7265 int32_t faceRectangles[MAX_ROI * 4];
7266 int32_t faceLandmarks[MAX_ROI * 6];
7267 size_t j = 0, k = 0;
7268
7269 for (size_t i = 0; i < numFaces; i++) {
7270 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7271 // Adjust the face rectangle from the sensor output coordinate system
7272 // to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007273 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7275 rect.width, rect.height);
7276
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007277 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007278
Jason Lee8ce36fa2017-04-19 19:40:37 -07007279 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7280 "bottom-right (%d, %d)",
7281 faceDetectionInfo->frame_id, i,
7282 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7283 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7284
Thierry Strudel3d639192016-09-09 11:52:26 -07007285 j+= 4;
7286 }
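                    /* convertToRegions() is presumed to turn the HAL
                     * {left, top, width, height} rectangle into the
                     * [left, top, right, bottom] layout expected by
                     * ANDROID_STATISTICS_FACE_RECTANGLES (the FD_DEBUG log in
                     * the loop above prints exactly those corners). E.g., with
                     * a simple right = left + width convention, a face at
                     * {left=100, top=200, width=50, height=60} maps to
                     * [100, 200, 150, 260]. */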
7287 if (numFaces <= 0) {
7288 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7289 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7290 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7291 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7292 }
7293
7294 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7295 numFaces);
7296 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7297 faceRectangles, numFaces * 4U);
7298 if (fwk_faceDetectMode ==
7299 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7300 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7301 CAM_INTF_META_FACE_LANDMARK, metadata) {
7302
7303 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007304 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007305 // Map the landmark coordinates from the sensor output coordinate
7306 // system to the active array coordinate system.
7307 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007308 face_landmarks.left_eye_center.x,
7309 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007310 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007311 face_landmarks.right_eye_center.x,
7312 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007313 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007314 face_landmarks.mouth_center.x,
7315 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007316
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007317 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007318
7319 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7320 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7321 faceDetectionInfo->frame_id, i,
7322 faceLandmarks[k + LEFT_EYE_X],
7323 faceLandmarks[k + LEFT_EYE_Y],
7324 faceLandmarks[k + RIGHT_EYE_X],
7325 faceLandmarks[k + RIGHT_EYE_Y],
7326 faceLandmarks[k + MOUTH_X],
7327 faceLandmarks[k + MOUTH_Y]);
7328
Thierry Strudel04e026f2016-10-10 11:27:36 -07007329 k+= TOTAL_LANDMARK_INDICES;
7330 }
7331 } else {
7332 for (size_t i = 0; i < numFaces; i++) {
7333 setInvalidLandmarks(faceLandmarks+k);
7334 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007335 }
7336 }
7337
Jason Lee49619db2017-04-13 12:07:22 -07007338 for (size_t i = 0; i < numFaces; i++) {
7339 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7340
7341 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7342 faceDetectionInfo->frame_id, i, faceIds[i]);
7343 }
7344
Thierry Strudel3d639192016-09-09 11:52:26 -07007345 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7346 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7347 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007348 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007349 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7350 CAM_INTF_META_FACE_BLINK, metadata) {
7351 uint8_t detected[MAX_ROI];
7352 uint8_t degree[MAX_ROI * 2];
7353 for (size_t i = 0; i < numFaces; i++) {
7354 detected[i] = blinks->blink[i].blink_detected;
7355 degree[2 * i] = blinks->blink[i].left_blink;
7356 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007357
Jason Lee49619db2017-04-13 12:07:22 -07007358 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7359 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7360 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7361 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007362 }
7363 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7364 detected, numFaces);
7365 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7366 degree, numFaces * 2);
7367 }
7368 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7369 CAM_INTF_META_FACE_SMILE, metadata) {
7370 uint8_t degree[MAX_ROI];
7371 uint8_t confidence[MAX_ROI];
7372 for (size_t i = 0; i < numFaces; i++) {
7373 degree[i] = smiles->smile[i].smile_degree;
7374 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007375
Jason Lee49619db2017-04-13 12:07:22 -07007376 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7377 "smile_degree=%d, smile_score=%d",
7378 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007379 }
7380 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7381 degree, numFaces);
7382 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7383 confidence, numFaces);
7384 }
7385 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7386 CAM_INTF_META_FACE_GAZE, metadata) {
7387 int8_t angle[MAX_ROI];
7388 int32_t direction[MAX_ROI * 3];
7389 int8_t degree[MAX_ROI * 2];
7390 for (size_t i = 0; i < numFaces; i++) {
7391 angle[i] = gazes->gaze[i].gaze_angle;
7392 direction[3 * i] = gazes->gaze[i].updown_dir;
7393 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7394 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7395 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7396 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007397
7398 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7399 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7400 "left_right_gaze=%d, top_bottom_gaze=%d",
7401 faceDetectionInfo->frame_id, i, angle[i],
7402 direction[3 * i], direction[3 * i + 1],
7403 direction[3 * i + 2],
7404 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007405 }
7406 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7407 (uint8_t *)angle, numFaces);
7408 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7409 direction, numFaces * 3);
7410 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7411 (uint8_t *)degree, numFaces * 2);
7412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007413 }
7414 }
7415 }
7416 }
7417
7418 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7419 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007420 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007421 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007422 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007423
Shuzhen Wang14415f52016-11-16 18:26:18 -08007424 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7425 histogramBins = *histBins;
7426 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7427 }
7428
7429 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007430 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7431 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007432 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007433
7434 switch (stats_data->type) {
7435 case CAM_HISTOGRAM_TYPE_BAYER:
7436 switch (stats_data->bayer_stats.data_type) {
7437 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007438 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7439 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007440 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007441 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7442 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007443 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007444 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7445 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007446 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007447 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007448 case CAM_STATS_CHANNEL_R:
7449 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007450 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7451 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007452 }
7453 break;
7454 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007455 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007456 break;
7457 }
7458
Shuzhen Wang14415f52016-11-16 18:26:18 -08007459 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007460 }
7461 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007462 }
7463
7464 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7465 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7466 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7467 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7468 }
7469
7470 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7471 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7472 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7473 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7474 }
7475
7476 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7477 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7478 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7479 CAM_MAX_SHADING_MAP_HEIGHT);
7480 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7481 CAM_MAX_SHADING_MAP_WIDTH);
7482 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7483 lensShadingMap->lens_shading, 4U * map_width * map_height);
7484 }
7485
7486 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7487 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7488 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7489 }
7490
7491 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7492 //Populate CAM_INTF_META_TONEMAP_CURVES
7493 /* ch0 = G, ch 1 = B, ch 2 = R*/
7494 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7495 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7496 tonemap->tonemap_points_cnt,
7497 CAM_MAX_TONEMAP_CURVE_SIZE);
7498 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7499 }
7500
7501 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7502 &tonemap->curves[0].tonemap_points[0][0],
7503 tonemap->tonemap_points_cnt * 2);
7504
7505 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7506 &tonemap->curves[1].tonemap_points[0][0],
7507 tonemap->tonemap_points_cnt * 2);
7508
7509 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7510 &tonemap->curves[2].tonemap_points[0][0],
7511 tonemap->tonemap_points_cnt * 2);
7512 }
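    /* Each framework tonemap curve is an array of (Pin, Pout) control-point
     * pairs, which is why the element counts above are tonemap_points_cnt * 2
     * floats for each of ANDROID_TONEMAP_CURVE_{GREEN,BLUE,RED}. */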
7513
7514 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7515 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7516 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7517 CC_GAIN_MAX);
7518 }
7519
7520 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7521 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7522 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7523 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7524 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7525 }
7526
7527 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7528 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7529 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7530 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7531 toneCurve->tonemap_points_cnt,
7532 CAM_MAX_TONEMAP_CURVE_SIZE);
7533 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7534 }
7535 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7536 (float*)toneCurve->curve.tonemap_points,
7537 toneCurve->tonemap_points_cnt * 2);
7538 }
7539
7540 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7541 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7542 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7543 predColorCorrectionGains->gains, 4);
7544 }
7545
7546 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7547 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7548 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7549 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7550 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7551 }
7552
7553 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7554 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7555 }
7556
7557 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7558 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7559 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7560 }
7561
7562 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7563 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7564 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7565 }
7566
7567 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7568 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7569 *effectMode);
7570 if (NAME_NOT_FOUND != val) {
7571 uint8_t fwk_effectMode = (uint8_t)val;
7572 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7573 }
7574 }
7575
7576 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7577 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7578 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7579 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7580 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7581 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7582 }
7583 int32_t fwk_testPatternData[4];
7584 fwk_testPatternData[0] = testPatternData->r;
7585 fwk_testPatternData[3] = testPatternData->b;
7586 switch (gCamCapability[mCameraId]->color_arrangement) {
7587 case CAM_FILTER_ARRANGEMENT_RGGB:
7588 case CAM_FILTER_ARRANGEMENT_GRBG:
7589 fwk_testPatternData[1] = testPatternData->gr;
7590 fwk_testPatternData[2] = testPatternData->gb;
7591 break;
7592 case CAM_FILTER_ARRANGEMENT_GBRG:
7593 case CAM_FILTER_ARRANGEMENT_BGGR:
7594 fwk_testPatternData[2] = testPatternData->gr;
7595 fwk_testPatternData[1] = testPatternData->gb;
7596 break;
7597 default:
7598 LOGE("color arrangement %d is not supported",
7599 gCamCapability[mCameraId]->color_arrangement);
7600 break;
7601 }
7602 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7603 }
7604
7605 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7606 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7607 }
7608
7609 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7610 String8 str((const char *)gps_methods);
7611 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7612 }
7613
7614 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7615 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7616 }
7617
7618 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7619 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7620 }
7621
7622 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7623 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7624 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7625 }
7626
7627 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7628 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7629 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7630 }
7631
7632 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7633 int32_t fwk_thumb_size[2];
7634 fwk_thumb_size[0] = thumb_size->width;
7635 fwk_thumb_size[1] = thumb_size->height;
7636 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7637 }
7638
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007639 // Skip reprocess metadata if there is no input stream.
7640 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7641 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7642 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7643 privateData,
7644 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007646 }
7647
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007648 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007649 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007650 meteringMode, 1);
7651 }
7652
Thierry Strudel54dc9782017-02-15 12:12:10 -08007653 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7654 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7655 LOGD("hdr_scene_data: %d %f\n",
7656 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7657 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7658 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7659 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7660 &isHdr, 1);
7661 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7662 &isHdrConfidence, 1);
7663 }
7664
7665
7666
Thierry Strudel3d639192016-09-09 11:52:26 -07007667 if (metadata->is_tuning_params_valid) {
7668 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7669 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7670 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7671
7672
7673 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7674 sizeof(uint32_t));
7675 data += sizeof(uint32_t);
7676
7677 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7678 sizeof(uint32_t));
7679 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7680 data += sizeof(uint32_t);
7681
7682 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7683 sizeof(uint32_t));
7684 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7685 data += sizeof(uint32_t);
7686
7687 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7688 sizeof(uint32_t));
7689 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7690 data += sizeof(uint32_t);
7691
7692 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7693 sizeof(uint32_t));
7694 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7695 data += sizeof(uint32_t);
7696
7697 metadata->tuning_params.tuning_mod3_data_size = 0;
7698 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7699 sizeof(uint32_t));
7700 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7701 data += sizeof(uint32_t);
7702
7703 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7704 TUNING_SENSOR_DATA_MAX);
7705 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7706 count);
7707 data += count;
7708
7709 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7710 TUNING_VFE_DATA_MAX);
7711 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7712 count);
7713 data += count;
7714
7715 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7716 TUNING_CPP_DATA_MAX);
7717 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7718 count);
7719 data += count;
7720
7721 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7722 TUNING_CAC_DATA_MAX);
7723 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7724 count);
7725 data += count;
7726
7727 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7728 (int32_t *)(void *)tuning_meta_data_blob,
7729 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7730 }
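            /* The tuning blob published above is a packed byte stream: six uint32_t
             * header words (data version and the sensor/VFE/CPP/CAC/mod3 section
             * sizes) followed by the sensor, VFE, CPP and CAC sections packed back
             * to back, each clamped to its TUNING_*_DATA_MAX limit. A minimal
             * consumer-side parsing sketch (illustrative only; readU32() is a
             * hypothetical helper that reads a native uint32_t and advances off):
             *
             *   size_t off = 0;
             *   uint32_t version    = readU32(blob, &off);
             *   uint32_t sensorSize = readU32(blob, &off);
             *   uint32_t vfeSize    = readU32(blob, &off);
             *   uint32_t cppSize    = readU32(blob, &off);
             *   uint32_t cacSize    = readU32(blob, &off);
             *   uint32_t mod3Size   = readU32(blob, &off);   // always 0 here
             *   const uint8_t *sensorData = blob + off;      // sensorSize bytes,
             *   // then the VFE, CPP and CAC data follow contiguously.
             */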
7731
7732 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7733 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7734 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7735 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7736 NEUTRAL_COL_POINTS);
7737 }
7738
7739 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7740 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7741 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7742 }
7743
7744 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7745 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7746 // Adjust the AE region from the sensor output coordinate system to the
7747 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007748 cam_rect_t hAeRect = hAeRegions->rect;
7749 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7750 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007751
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007752 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7754 REGIONS_TUPLE_COUNT);
7755 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7756 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007757 hAeRect.left, hAeRect.top, hAeRect.width,
7758 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007759 }
7760
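    // If an early AF state was already delivered with the urgent partial result
    // (focusStateSent), do not send ANDROID_CONTROL_AF_STATE again here.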
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007761 if (!pendingRequest.focusStateSent) {
7762 if (pendingRequest.focusStateValid) {
7763 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7764 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007765 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007766 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7767 uint8_t fwk_afState = (uint8_t) *afState;
7768 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7769 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7770 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007771 }
7772 }
7773
Thierry Strudel3d639192016-09-09 11:52:26 -07007774 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7775 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7776 }
7777
7778 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7779 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7780 }
7781
7782 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7783 uint8_t fwk_lensState = *lensState;
7784 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7785 }
7786
Thierry Strudel3d639192016-09-09 11:52:26 -07007787 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007788 uint32_t ab_mode = *hal_ab_mode;
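        // Fold the 50Hz/60Hz auto sub-modes into plain AUTO before the lookup,
        // since the framework antibanding enum only defines OFF, 50HZ, 60HZ and AUTO.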
7789 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7790 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7791 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007793 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007794 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007795 if (NAME_NOT_FOUND != val) {
7796 uint8_t fwk_ab_mode = (uint8_t)val;
7797 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7798 }
7799 }
7800
7801 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7802 int val = lookupFwkName(SCENE_MODES_MAP,
7803 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7804 if (NAME_NOT_FOUND != val) {
7805 uint8_t fwkBestshotMode = (uint8_t)val;
7806 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7807 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7808 } else {
7809 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7810 }
7811 }
7812
7813 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7814 uint8_t fwk_mode = (uint8_t) *mode;
7815 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7816 }
7817
7818 /* Constant metadata values to be updated */
7819 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7820 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7821
7822 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7823 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7824
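    // Publish an empty hot pixel map (zero entries); individual hot pixel
    // coordinates are not reported here.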
7825 int32_t hotPixelMap[2];
7826 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7827
7828 // CDS
7829 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7830 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7831 }
7832
Thierry Strudel04e026f2016-10-10 11:27:36 -07007833 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7834 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007835 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007836 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7837 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7838 } else {
7839 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7840 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007841
7842 if(fwk_hdr != curr_hdr_state) {
7843 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7844 if(fwk_hdr)
7845 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7846 else
7847 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7848 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007849 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7850 }
7851
Thierry Strudel54dc9782017-02-15 12:12:10 -08007852 //binning correction
7853 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7854 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7855 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7856 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7857 }
7858
Thierry Strudel04e026f2016-10-10 11:27:36 -07007859 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007860 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007861 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7862 int8_t is_ir_on = 0;
7863
7864 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7865 if(is_ir_on != curr_ir_state) {
7866 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7867 if(is_ir_on)
7868 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7869 else
7870 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7871 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007872 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007873 }
7874
Thierry Strudel269c81a2016-10-12 12:13:59 -07007875 // AEC SPEED
7876 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7877 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7878 }
7879
7880 // AWB SPEED
7881 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7882 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7883 }
7884
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 // TNR
7886 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7887 uint8_t tnr_enable = tnr->denoise_enable;
7888 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007889 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7890 int8_t is_tnr_on = 0;
7891
7892 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7893 if(is_tnr_on != curr_tnr_state) {
7894 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7895 if(is_tnr_on)
7896 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7897 else
7898 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007900
7901 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7902 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7903 }
7904
7905 // Reprocess crop data
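    // For the reprocessible stream, the crop is published as a [left, top, width,
    // height] quadruple under QCAMERA3_CROP_REPROCESS, with the matching ROI map
    // under QCAMERA3_CROP_ROI_MAP_REPROCESS and the entry count under
    // QCAMERA3_CROP_COUNT_REPROCESS.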
7906 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7907 uint8_t cnt = crop_data->num_of_streams;
7908 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7909 // mm-qcamera-daemon only posts crop_data for streams
7910 // not linked to pproc, so the absence of valid crop metadata
7911 // is not necessarily an error case.
7912 LOGD("No valid crop metadata entries");
7913 } else {
7914 uint32_t reproc_stream_id;
7915 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7916 LOGD("No reprocessible stream found, ignore crop data");
7917 } else {
7918 int rc = NO_ERROR;
7919 Vector<int32_t> roi_map;
7920 int32_t *crop = new int32_t[cnt*4];
7921 if (NULL == crop) {
7922 rc = NO_MEMORY;
7923 }
7924 if (NO_ERROR == rc) {
7925 int32_t streams_found = 0;
7926 for (size_t i = 0; i < cnt; i++) {
7927 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7928 if (pprocDone) {
7929 // HAL already does internal reprocessing,
7930 // either via reprocessing before JPEG encoding,
7931 // or offline postprocessing for pproc bypass case.
7932 crop[0] = 0;
7933 crop[1] = 0;
7934 crop[2] = mInputStreamInfo.dim.width;
7935 crop[3] = mInputStreamInfo.dim.height;
7936 } else {
7937 crop[0] = crop_data->crop_info[i].crop.left;
7938 crop[1] = crop_data->crop_info[i].crop.top;
7939 crop[2] = crop_data->crop_info[i].crop.width;
7940 crop[3] = crop_data->crop_info[i].crop.height;
7941 }
7942 roi_map.add(crop_data->crop_info[i].roi_map.left);
7943 roi_map.add(crop_data->crop_info[i].roi_map.top);
7944 roi_map.add(crop_data->crop_info[i].roi_map.width);
7945 roi_map.add(crop_data->crop_info[i].roi_map.height);
7946 streams_found++;
7947 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7948 crop[0], crop[1], crop[2], crop[3]);
7949 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7950 crop_data->crop_info[i].roi_map.left,
7951 crop_data->crop_info[i].roi_map.top,
7952 crop_data->crop_info[i].roi_map.width,
7953 crop_data->crop_info[i].roi_map.height);
7954 break;
7955
7956 }
7957 }
7958 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7959 &streams_found, 1);
7960 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7961 crop, (size_t)(streams_found * 4));
7962 if (roi_map.array()) {
7963 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7964 roi_map.array(), roi_map.size());
7965 }
7966 }
7967 if (crop) {
7968 delete [] crop;
7969 }
7970 }
7971 }
7972 }
7973
7974 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7975 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7976 // so hardcode the CAC result to OFF mode.
7977 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7978 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7979 } else {
7980 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7981 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7982 *cacMode);
7983 if (NAME_NOT_FOUND != val) {
7984 uint8_t resultCacMode = (uint8_t)val;
7985 // check whether CAC result from CB is equal to Framework set CAC mode
7986 // If not equal then set the CAC mode came in corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007987 if (pendingRequest.fwkCacMode != resultCacMode) {
7988 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007989 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007990 //Check if CAC is disabled by property
7991 if (m_cacModeDisabled) {
7992 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7993 }
7994
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007995 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007996 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7997 } else {
7998 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7999 }
8000 }
8001 }
8002
8003 // Post blob of cam_cds_data through vendor tag.
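    // Collapse the per-stream CDS info to a single entry, taken from the
    // reprocessible stream, before posting it through the vendor tag.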
8004 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8005 uint8_t cnt = cdsInfo->num_of_streams;
8006 cam_cds_data_t cdsDataOverride;
8007 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8008 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8009 cdsDataOverride.num_of_streams = 1;
8010 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8011 uint32_t reproc_stream_id;
8012 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8013 LOGD("No reprocessible stream found, ignore cds data");
8014 } else {
8015 for (size_t i = 0; i < cnt; i++) {
8016 if (cdsInfo->cds_info[i].stream_id ==
8017 reproc_stream_id) {
8018 cdsDataOverride.cds_info[0].cds_enable =
8019 cdsInfo->cds_info[i].cds_enable;
8020 break;
8021 }
8022 }
8023 }
8024 } else {
8025 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8026 }
8027 camMetadata.update(QCAMERA3_CDS_INFO,
8028 (uint8_t *)&cdsDataOverride,
8029 sizeof(cam_cds_data_t));
8030 }
8031
8032 // Ldaf calibration data
8033 if (!mLdafCalibExist) {
8034 IF_META_AVAILABLE(uint32_t, ldafCalib,
8035 CAM_INTF_META_LDAF_EXIF, metadata) {
8036 mLdafCalibExist = true;
8037 mLdafCalib[0] = ldafCalib[0];
8038 mLdafCalib[1] = ldafCalib[1];
8039 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8040 ldafCalib[0], ldafCalib[1]);
8041 }
8042 }
8043
Thierry Strudel54dc9782017-02-15 12:12:10 -08008044 // EXIF debug data through vendor tag
8045 /*
8046 * Mobicat Mask can assume 3 values:
8047 * 1 refers to Mobicat data,
8048 * 2 refers to Stats Debug and Exif Debug Data
8049 * 3 refers to Mobicat and Stats Debug Data
8050 * We want to make sure that we are sending Exif debug data
8051 * only when Mobicat Mask is 2.
8052 */
8053 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8054 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8055 (uint8_t *)(void *)mExifParams.debug_params,
8056 sizeof(mm_jpeg_debug_exif_params_t));
8057 }
8058
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008059 // Reprocess and DDM debug data through vendor tag
8060 cam_reprocess_info_t repro_info;
8061 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008062 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8063 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008064 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008065 }
8066 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8067 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008068 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008069 }
8070 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8071 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008072 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008073 }
8074 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8075 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008076 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
8078 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8079 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008080 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008081 }
8082 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008083 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 }
8085 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8086 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008087 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008088 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008089 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8090 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8091 }
8092 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8093 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8094 }
8095 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8096 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008097
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008098 // INSTANT AEC MODE
8099 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8100 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8101 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8102 }
8103
Shuzhen Wange763e802016-03-31 10:24:29 -07008104 // AF scene change
8105 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8106 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8107 }
8108
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008109 // Enable ZSL
8110 if (enableZsl != nullptr) {
8111 uint8_t value = *enableZsl ?
8112 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8113 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8114 }
8115
Xu Han821ea9c2017-05-23 09:00:40 -07008116 // OIS Data
8117 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8118 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8119 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8120 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8121 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8122 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8123 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8124 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8125 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8126 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8127 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008128 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8129 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8130 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8131 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008132 }
8133
Thierry Strudel3d639192016-09-09 11:52:26 -07008134 resultMetadata = camMetadata.release();
8135 return resultMetadata;
8136}
8137
8138/*===========================================================================
8139 * FUNCTION : saveExifParams
8140 *
8141 * DESCRIPTION:
8142 *
8143 * PARAMETERS :
8144 * @metadata : metadata information from callback
8145 *
8146 * RETURN : none
8147 *
8148 *==========================================================================*/
8149void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8150{
8151 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8152 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8153 if (mExifParams.debug_params) {
8154 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8155 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8156 }
8157 }
8158 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8159 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8160 if (mExifParams.debug_params) {
8161 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8162 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8163 }
8164 }
8165 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8166 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8167 if (mExifParams.debug_params) {
8168 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8169 mExifParams.debug_params->af_debug_params_valid = TRUE;
8170 }
8171 }
8172 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8173 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8174 if (mExifParams.debug_params) {
8175 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8176 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8177 }
8178 }
8179 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8180 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8181 if (mExifParams.debug_params) {
8182 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8183 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8184 }
8185 }
8186 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8187 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8188 if (mExifParams.debug_params) {
8189 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8190 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8191 }
8192 }
8193 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8194 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8195 if (mExifParams.debug_params) {
8196 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8197 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8198 }
8199 }
8200 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8201 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8202 if (mExifParams.debug_params) {
8203 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8204 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8205 }
8206 }
8207}
8208
8209/*===========================================================================
8210 * FUNCTION : get3AExifParams
8211 *
8212 * DESCRIPTION:
8213 *
8214 * PARAMETERS : none
8215 *
8216 *
8217 * RETURN : mm_jpeg_exif_params_t
8218 *
8219 *==========================================================================*/
8220mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8221{
8222 return mExifParams;
8223}
8224
8225/*===========================================================================
8226 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8227 *
8228 * DESCRIPTION:
8229 *
8230 * PARAMETERS :
8231 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008232 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8233 * urgent metadata in a batch. Always true for
8234 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008235 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008236 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8237 * i.e. even though it doesn't map to a valid partial
8238 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008239 * RETURN : camera_metadata_t*
8240 * metadata in a format specified by fwk
8241 *==========================================================================*/
8242camera_metadata_t*
8243QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008244 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008245 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008246{
8247 CameraMetadata camMetadata;
8248 camera_metadata_t *resultMetadata;
8249
Shuzhen Wang485e2442017-08-02 12:21:08 -07008250 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008251 /* In batch mode, use empty metadata if this is not the last in batch
8252 */
8253 resultMetadata = allocate_camera_metadata(0, 0);
8254 return resultMetadata;
8255 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008256
8257 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8258 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8259 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8260 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8261 }
8262
8263 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8264 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8265 &aecTrigger->trigger, 1);
8266 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8267 &aecTrigger->trigger_id, 1);
8268 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8269 aecTrigger->trigger);
8270 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8271 aecTrigger->trigger_id);
8272 }
8273
8274 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8275 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8276 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8277 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8278 }
8279
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008280 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8281 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8282 if (NAME_NOT_FOUND != val) {
8283 uint8_t fwkAfMode = (uint8_t)val;
8284 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8285 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8286 } else {
8287 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8288 val);
8289 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008290 }
8291
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008292 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8293 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8294 af_trigger->trigger);
8295 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8296 af_trigger->trigger_id);
8297
8298 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8299 mAfTrigger = *af_trigger;
8300 uint32_t fwk_AfState = (uint32_t) *afState;
8301
8302 // For a new AF trigger result, record the current AF state for all pending
8303 // requests preceding this partial frame number; if an early AF state is
8304 // available for this frame, report it right away as a partial result.
8305 for (auto & pendingRequest : mPendingRequestsList) {
8306 if (pendingRequest.frame_number < frame_number) {
8307 pendingRequest.focusStateValid = true;
8308 pendingRequest.focusState = fwk_AfState;
8309 } else if (pendingRequest.frame_number == frame_number) {
8310 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8311 // Check if early AF state for trigger exists. If yes, send AF state as
8312 // partial result for better latency.
8313 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8314 pendingRequest.focusStateSent = true;
8315 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8316 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8317 frame_number, fwkEarlyAfState);
8318 }
8319 }
8320 }
8321 }
8322 }
8323 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8324 &mAfTrigger.trigger, 1);
8325 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8326
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008327 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8328 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008329 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008330 int32_t afRegions[REGIONS_TUPLE_COUNT];
8331 // Adjust the AF region from the sensor output coordinate system to the
8332 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008333 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8334 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008335
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008336 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008337 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8338 REGIONS_TUPLE_COUNT);
8339 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8340 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008341 hAfRect.left, hAfRect.top, hAfRect.width,
8342 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008343 }
8344
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008345 // AF region confidence
8346 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8347 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8348 }
8349
Thierry Strudel3d639192016-09-09 11:52:26 -07008350 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8351 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8352 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8353 if (NAME_NOT_FOUND != val) {
8354 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8355 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8356 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8357 } else {
8358 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8359 }
8360 }
8361
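    // Derive ANDROID_CONTROL_AE_MODE from the backend red-eye, LED flash and AEC
    // mode fields: red-eye reduction takes precedence, then auto/on flash modes,
    // then plain AE on/off (or the experimental external-flash mode).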
8362 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8363 uint32_t aeMode = CAM_AE_MODE_MAX;
8364 int32_t flashMode = CAM_FLASH_MODE_MAX;
8365 int32_t redeye = -1;
8366 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8367 aeMode = *pAeMode;
8368 }
8369 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8370 flashMode = *pFlashMode;
8371 }
8372 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8373 redeye = *pRedeye;
8374 }
8375
8376 if (1 == redeye) {
8377 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8378 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8379 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8380 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8381 flashMode);
8382 if (NAME_NOT_FOUND != val) {
8383 fwk_aeMode = (uint8_t)val;
8384 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8385 } else {
8386 LOGE("Unsupported flash mode %d", flashMode);
8387 }
8388 } else if (aeMode == CAM_AE_MODE_ON) {
8389 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8390 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8391 } else if (aeMode == CAM_AE_MODE_OFF) {
8392 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8393 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008394 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8395 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8396 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008397 } else {
8398 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8399 "flashMode:%d, aeMode:%u!!!",
8400 redeye, flashMode, aeMode);
8401 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008402 if (mInstantAEC) {
8403 // Increment the frame index count until a bound is reached for instant AEC.
8404 mInstantAecFrameIdxCount++;
8405 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8406 CAM_INTF_META_AEC_INFO, metadata) {
8407 LOGH("ae_params->settled = %d",ae_params->settled);
8408 // If AEC settled, or if number of frames reached bound value,
8409 // should reset instant AEC.
8410 if (ae_params->settled ||
8411 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8412 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8413 mInstantAEC = false;
8414 mResetInstantAEC = true;
8415 mInstantAecFrameIdxCount = 0;
8416 }
8417 }
8418 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008419 resultMetadata = camMetadata.release();
8420 return resultMetadata;
8421}
8422
8423/*===========================================================================
8424 * FUNCTION : dumpMetadataToFile
8425 *
8426 * DESCRIPTION: Dumps tuning metadata to file system
8427 *
8428 * PARAMETERS :
8429 * @meta : tuning metadata
8430 * @dumpFrameCount : current dump frame count
8431 * @enabled : whether dumping is enabled
 * @type : dump type string used in the generated file name
 * @frameNumber : frame number associated with the dump
8432 *
8433 *==========================================================================*/
8434void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8435 uint32_t &dumpFrameCount,
8436 bool enabled,
8437 const char *type,
8438 uint32_t frameNumber)
8439{
8440 //Some sanity checks
8441 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8442 LOGE("Tuning sensor data size bigger than expected %d: %d",
8443 meta.tuning_sensor_data_size,
8444 TUNING_SENSOR_DATA_MAX);
8445 return;
8446 }
8447
8448 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8449 LOGE("Tuning VFE data size bigger than expected %d: %d",
8450 meta.tuning_vfe_data_size,
8451 TUNING_VFE_DATA_MAX);
8452 return;
8453 }
8454
8455 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8456 LOGE("Tuning CPP data size bigger than expected %d: %d",
8457 meta.tuning_cpp_data_size,
8458 TUNING_CPP_DATA_MAX);
8459 return;
8460 }
8461
8462 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8463 LOGE("Tuning CAC data size bigger than expected %d: %d",
8464 meta.tuning_cac_data_size,
8465 TUNING_CAC_DATA_MAX);
8466 return;
8467 }
8468 //
8469
8470 if(enabled){
8471 char timeBuf[FILENAME_MAX];
8472 char buf[FILENAME_MAX];
8473 memset(buf, 0, sizeof(buf));
8474 memset(timeBuf, 0, sizeof(timeBuf));
8475 time_t current_time;
8476 struct tm * timeinfo;
8477 time (&current_time);
8478 timeinfo = localtime (&current_time);
8479 if (timeinfo != NULL) {
8480 strftime (timeBuf, sizeof(timeBuf),
8481 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8482 }
8483 String8 filePath(timeBuf);
8484 snprintf(buf,
8485 sizeof(buf),
8486 "%dm_%s_%d.bin",
8487 dumpFrameCount,
8488 type,
8489 frameNumber);
8490 filePath.append(buf);
8491 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8492 if (file_fd >= 0) {
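            // Dump file layout: six uint32_t header words (data version and the
            // sensor/VFE/CPP/CAC/mod3 section sizes) followed by the sensor, VFE,
            // CPP and CAC data sections.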
8493 ssize_t written_len = 0;
8494 meta.tuning_data_version = TUNING_DATA_VERSION;
8495 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8496 written_len += write(file_fd, data, sizeof(uint32_t));
8497 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8498 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8499 written_len += write(file_fd, data, sizeof(uint32_t));
8500 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8501 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8502 written_len += write(file_fd, data, sizeof(uint32_t));
8503 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8504 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8505 written_len += write(file_fd, data, sizeof(uint32_t));
8506 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8507 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8508 written_len += write(file_fd, data, sizeof(uint32_t));
8509 meta.tuning_mod3_data_size = 0;
8510 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8511 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8512 written_len += write(file_fd, data, sizeof(uint32_t));
8513 size_t total_size = meta.tuning_sensor_data_size;
8514 data = (void *)((uint8_t *)&meta.data);
8515 written_len += write(file_fd, data, total_size);
8516 total_size = meta.tuning_vfe_data_size;
8517 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8518 written_len += write(file_fd, data, total_size);
8519 total_size = meta.tuning_cpp_data_size;
8520 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8521 written_len += write(file_fd, data, total_size);
8522 total_size = meta.tuning_cac_data_size;
8523 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8524 written_len += write(file_fd, data, total_size);
8525 close(file_fd);
8526 }else {
8527 LOGE("fail to open file for metadata dumping");
8528 }
8529 }
8530}
8531
8532/*===========================================================================
8533 * FUNCTION : cleanAndSortStreamInfo
8534 *
8535 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8536 * and sort them such that raw stream is at the end of the list
8537 * This is a workaround for a camera daemon constraint.
8538 *
8539 * PARAMETERS : None
8540 *
8541 *==========================================================================*/
8542void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8543{
8544 List<stream_info_t *> newStreamInfo;
8545
8546 /*clean up invalid streams*/
8547 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8548 it != mStreamInfo.end();) {
8549 if(((*it)->status) == INVALID){
8550 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8551 delete channel;
8552 free(*it);
8553 it = mStreamInfo.erase(it);
8554 } else {
8555 it++;
8556 }
8557 }
8558
8559 // Move preview/video/callback/snapshot streams into newList
8560 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8561 it != mStreamInfo.end();) {
8562 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8563 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8564 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8565 newStreamInfo.push_back(*it);
8566 it = mStreamInfo.erase(it);
8567 } else
8568 it++;
8569 }
8570 // Move raw streams into newList
8571 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8572 it != mStreamInfo.end();) {
8573 newStreamInfo.push_back(*it);
8574 it = mStreamInfo.erase(it);
8575 }
8576
8577 mStreamInfo = newStreamInfo;
8578}
8579
8580/*===========================================================================
8581 * FUNCTION : extractJpegMetadata
8582 *
8583 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8584 * JPEG metadata is cached in HAL, and return as part of capture
8585 * result when metadata is returned from camera daemon.
8586 *
8587 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8588 * @request: capture request
8589 *
8590 *==========================================================================*/
8591void QCamera3HardwareInterface::extractJpegMetadata(
8592 CameraMetadata& jpegMetadata,
8593 const camera3_capture_request_t *request)
8594{
8595 CameraMetadata frame_settings;
8596 frame_settings = request->settings;
8597
8598 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8599 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8600 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8601 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8602
8603 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8604 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8605 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8606 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8607
8608 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8609 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8610 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8611 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8612
8613 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8614 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8615 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8616 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8617
8618 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8619 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8620 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8621 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8622
8623 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8624 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8625 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8626 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8627
8628 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8629 int32_t thumbnail_size[2];
8630 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8631 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8632 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8633 int32_t orientation =
8634 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008635 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008636 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
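                // e.g. a 320x240 thumbnail requested together with a 90 degree JPEG
                // orientation is reported as 240x320 when the HAL performs the
                // rotation itself (needJpegExifRotation() returns false).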
8637 int32_t temp;
8638 temp = thumbnail_size[0];
8639 thumbnail_size[0] = thumbnail_size[1];
8640 thumbnail_size[1] = temp;
8641 }
8642 }
8643 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8644 thumbnail_size,
8645 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8646 }
8647
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : convertToRegions
8652 *
8653 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8654 *
8655 * PARAMETERS :
8656 * @rect : cam_rect_t struct to convert
8657 * @region : int32_t destination array
8658 * @weight : if we are converting from cam_area_t, weight is valid
8659 * else weight = -1
8660 *
8661 *==========================================================================*/
8662void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8663 int32_t *region, int weight)
8664{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008665 region[FACE_LEFT] = rect.left;
8666 region[FACE_TOP] = rect.top;
8667 region[FACE_RIGHT] = rect.left + rect.width;
8668 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008669 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008670 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008671 }
8672}
8673
8674/*===========================================================================
8675 * FUNCTION : convertFromRegions
8676 *
8677 * DESCRIPTION: helper method to convert a region entry from frame settings into cam_area_t
8678 *
8679 * PARAMETERS :
8680 * @roi : cam_area_t destination to fill
8681 * @frame_settings : frame settings containing the region entry
8682 * @tag : metadata tag of the region entry to convert
8683 * (layout: xmin, ymin, xmax, ymax, weight)
8684 *
8685 *==========================================================================*/
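/* Illustrative example: a five-element region entry {0, 0, 1000, 750, 1}
 * (xmin, ymin, xmax, ymax, weight) produces roi.rect = {left 0, top 0,
 * width 1000, height 750} and roi.weight = 1.
 */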
8686void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008687 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008688{
Thierry Strudel3d639192016-09-09 11:52:26 -07008689 int32_t x_min = frame_settings.find(tag).data.i32[0];
8690 int32_t y_min = frame_settings.find(tag).data.i32[1];
8691 int32_t x_max = frame_settings.find(tag).data.i32[2];
8692 int32_t y_max = frame_settings.find(tag).data.i32[3];
8693 roi.weight = frame_settings.find(tag).data.i32[4];
8694 roi.rect.left = x_min;
8695 roi.rect.top = y_min;
8696 roi.rect.width = x_max - x_min;
8697 roi.rect.height = y_max - y_min;
8698}
8699
8700/*===========================================================================
8701 * FUNCTION : resetIfNeededROI
8702 *
8703 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8704 * crop region
8705 *
8706 * PARAMETERS :
8707 * @roi : cam_area_t struct to resize
8708 * @scalerCropRegion : cam_crop_region_t region to compare against
8709 *
8710 *
8711 *==========================================================================*/
8712bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8713 const cam_crop_region_t* scalerCropRegion)
8714{
8715 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8716 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8717 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8718 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8719
8720 /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8721 * Without this check, the validation below (whether the ROI lies inside the
8722 * scaler crop region) would fail, so the ROI would not be reset and the
8723 * algorithm would keep using a stale ROI window.
8724 */
8725 if (roi->weight == 0) {
8726 return true;
8727 }
8728
8729 if ((roi_x_max < scalerCropRegion->left) ||
8730 // right edge of roi window is left of scaler crop's left edge
8731 (roi_y_max < scalerCropRegion->top) ||
8732 // bottom edge of roi window is above scaler crop's top edge
8733 (roi->rect.left > crop_x_max) ||
8734 // left edge of roi window is beyond (to the right of) scaler crop's right edge
8735 (roi->rect.top > crop_y_max)){
8736 // top edge of roi window is below scaler crop's bottom edge
8737 return false;
8738 }
8739 if (roi->rect.left < scalerCropRegion->left) {
8740 roi->rect.left = scalerCropRegion->left;
8741 }
8742 if (roi->rect.top < scalerCropRegion->top) {
8743 roi->rect.top = scalerCropRegion->top;
8744 }
8745 if (roi_x_max > crop_x_max) {
8746 roi_x_max = crop_x_max;
8747 }
8748 if (roi_y_max > crop_y_max) {
8749 roi_y_max = crop_y_max;
8750 }
8751 roi->rect.width = roi_x_max - roi->rect.left;
8752 roi->rect.height = roi_y_max - roi->rect.top;
8753 return true;
8754}
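/* Worked example (illustrative): with scalerCropRegion = {0, 0, 2000, 1500} and an
 * incoming roi of {left 100, top 100, width 4000, height 3000} with weight 1, none
 * of the reject conditions hit, so the roi is clamped to {100, 100, 1900, 1400}
 * and the function returns true.
 */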
8755
8756/*===========================================================================
8757 * FUNCTION : convertLandmarks
8758 *
8759 * DESCRIPTION: helper method to extract the landmarks from face detection info
8760 *
8761 * PARAMETERS :
8762 * @landmark_data : input landmark data to be converted
8763 * @landmarks : int32_t destination array
8764 *
8765 *
8766 *==========================================================================*/
8767void QCamera3HardwareInterface::convertLandmarks(
8768 cam_face_landmarks_info_t landmark_data,
8769 int32_t *landmarks)
8770{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008771 if (landmark_data.is_left_eye_valid) {
8772 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8773 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8774 } else {
8775 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8776 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8777 }
8778
8779 if (landmark_data.is_right_eye_valid) {
8780 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8781 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8782 } else {
8783 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8784 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8785 }
8786
8787 if (landmark_data.is_mouth_valid) {
8788 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8789 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8790 } else {
8791 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8792 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8793 }
8794}
8795
8796/*===========================================================================
8797 * FUNCTION : setInvalidLandmarks
8798 *
8799 * DESCRIPTION: helper method to set invalid landmarks
8800 *
8801 * PARAMETERS :
8802 * @landmarks : int32_t destination array
8803 *
8804 *
8805 *==========================================================================*/
8806void QCamera3HardwareInterface::setInvalidLandmarks(
8807 int32_t *landmarks)
8808{
8809 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8810 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8811 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8812 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8813 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8814 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008815}
8816
8817#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008818
8819/*===========================================================================
8820 * FUNCTION : getCapabilities
8821 *
8822 * DESCRIPTION: query camera capability from back-end
8823 *
8824 * PARAMETERS :
8825 * @ops : mm-interface ops structure
8826 * @cam_handle : camera handle for which we need capability
8827 *
8828 * RETURN : ptr type of capability structure
8829 * capability for success
8830 * NULL for failure
8831 *==========================================================================*/
8832cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8833 uint32_t cam_handle)
8834{
8835 int rc = NO_ERROR;
8836 QCamera3HeapMemory *capabilityHeap = NULL;
8837 cam_capability_t *cap_ptr = NULL;
8838
8839 if (ops == NULL) {
8840 LOGE("Invalid arguments");
8841 return NULL;
8842 }
8843
8844 capabilityHeap = new QCamera3HeapMemory(1);
8845 if (capabilityHeap == NULL) {
8846 LOGE("creation of capabilityHeap failed");
8847 return NULL;
8848 }
8849
8850 /* Allocate memory for capability buffer */
8851 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8852 if(rc != OK) {
8853 LOGE("No memory for cappability");
8854 goto allocate_failed;
8855 }
8856
8857 /* Map memory for capability buffer */
8858 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8859
8860 rc = ops->map_buf(cam_handle,
8861 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8862 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8863 if(rc < 0) {
8864 LOGE("failed to map capability buffer");
8865 rc = FAILED_TRANSACTION;
8866 goto map_failed;
8867 }
8868
8869 /* Query Capability */
8870 rc = ops->query_capability(cam_handle);
8871 if(rc < 0) {
8872 LOGE("failed to query capability");
8873 rc = FAILED_TRANSACTION;
8874 goto query_failed;
8875 }
8876
8877 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8878 if (cap_ptr == NULL) {
8879 LOGE("out of memory");
8880 rc = NO_MEMORY;
8881 goto query_failed;
8882 }
8883
8884 memset(cap_ptr, 0, sizeof(cam_capability_t));
8885 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8886
8887 int index;
8888 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8889 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8890 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8891 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8892 }
8893
8894query_failed:
8895 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8896map_failed:
8897 capabilityHeap->deallocate();
8898allocate_failed:
8899 delete capabilityHeap;
8900
8901 if (rc != NO_ERROR) {
8902 return NULL;
8903 } else {
8904 return cap_ptr;
8905 }
8906}
8907
Thierry Strudel3d639192016-09-09 11:52:26 -07008908/*===========================================================================
8909 * FUNCTION : initCapabilities
8910 *
8911 * DESCRIPTION: initialize camera capabilities in static data struct
8912 *
8913 * PARAMETERS :
8914 * @cameraId : camera Id
8915 *
8916 * RETURN : int32_t type of status
8917 * NO_ERROR -- success
8918 * none-zero failure code
8919 *==========================================================================*/
8920int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8921{
8922 int rc = 0;
8923 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008924 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008925
8926 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8927 if (rc) {
8928 LOGE("camera_open failed. rc = %d", rc);
8929 goto open_failed;
8930 }
8931 if (!cameraHandle) {
8932 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8933 goto open_failed;
8934 }
8935
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008936 handle = get_main_camera_handle(cameraHandle->camera_handle);
8937 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8938 if (gCamCapability[cameraId] == NULL) {
8939 rc = FAILED_TRANSACTION;
8940 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008941 }
8942
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008943 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008944 if (is_dual_camera_by_idx(cameraId)) {
8945 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8946 gCamCapability[cameraId]->aux_cam_cap =
8947 getCapabilities(cameraHandle->ops, handle);
8948 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8949 rc = FAILED_TRANSACTION;
8950 free(gCamCapability[cameraId]);
8951 goto failed_op;
8952 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008953
8954 // Copy the main camera capability to main_cam_cap struct
8955 gCamCapability[cameraId]->main_cam_cap =
8956 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8957 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8958 LOGE("out of memory");
8959 rc = NO_MEMORY;
8960 goto failed_op;
8961 }
8962 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8963 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008964 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008965failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008966 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8967 cameraHandle = NULL;
8968open_failed:
8969 return rc;
8970}
8971
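/* Sketch of the resulting capability layout for a dual-camera id after
 * initCapabilities() succeeds (field names as used above; the index value is
 * illustrative):
 *
 *   cam_capability_t *cap = gCamCapability[cameraId];
 *   // cap               -> capability queried from the main camera handle
 *   // cap->aux_cam_cap  -> capability queried from the aux camera handle
 *   // cap->main_cam_cap -> heap copy of the main capability
 */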
8972/*==========================================================================
8973 * FUNCTION : get3AVersion
8974 *
8975 * DESCRIPTION: get the Q3A S/W version
8976 *
8977 * PARAMETERS :
8978 * @sw_version: Reference of Q3A structure which will hold version info upon
8979 * return
8980 *
8981 * RETURN : None
8982 *
8983 *==========================================================================*/
8984void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8985{
8986 if(gCamCapability[mCameraId])
8987 sw_version = gCamCapability[mCameraId]->q3a_version;
8988 else
8989 LOGE("Capability structure NULL!");
8990}
8991
8992
8993/*===========================================================================
8994 * FUNCTION : initParameters
8995 *
8996 * DESCRIPTION: initialize camera parameters
8997 *
8998 * PARAMETERS :
8999 *
9000 * RETURN : int32_t type of status
9001 * NO_ERROR -- success
9002 * non-zero failure code
9003 *==========================================================================*/
9004int QCamera3HardwareInterface::initParameters()
9005{
9006 int rc = 0;
9007
9008 //Allocate Set Param Buffer
9009 mParamHeap = new QCamera3HeapMemory(1);
9010 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9011 if(rc != OK) {
9012 rc = NO_MEMORY;
9013 LOGE("Failed to allocate SETPARM Heap memory");
9014 delete mParamHeap;
9015 mParamHeap = NULL;
9016 return rc;
9017 }
9018
9019 //Map memory for parameters buffer
9020 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9021 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9022 mParamHeap->getFd(0),
9023 sizeof(metadata_buffer_t),
9024 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9025 if(rc < 0) {
9026 LOGE("failed to map SETPARM buffer");
9027 rc = FAILED_TRANSACTION;
9028 mParamHeap->deallocate();
9029 delete mParamHeap;
9030 mParamHeap = NULL;
9031 return rc;
9032 }
9033
9034 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9035
9036 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9037 return rc;
9038}
9039
9040/*===========================================================================
9041 * FUNCTION : deinitParameters
9042 *
9043 * DESCRIPTION: de-initialize camera parameters
9044 *
9045 * PARAMETERS :
9046 *
9047 * RETURN : NONE
9048 *==========================================================================*/
9049void QCamera3HardwareInterface::deinitParameters()
9050{
9051 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9052 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9053
9054 mParamHeap->deallocate();
9055 delete mParamHeap;
9056 mParamHeap = NULL;
9057
9058 mParameters = NULL;
9059
9060 free(mPrevParameters);
9061 mPrevParameters = NULL;
9062}
9063
9064/*===========================================================================
9065 * FUNCTION : calcMaxJpegSize
9066 *
9067 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9068 *
9069 * PARAMETERS :
9070 * @camera_id : camera Id
9071 * RETURN : max_jpeg_size
9072 *==========================================================================*/
9073size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9074{
9075 size_t max_jpeg_size = 0;
9076 size_t temp_width, temp_height;
9077 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9078 MAX_SIZES_CNT);
9079 for (size_t i = 0; i < count; i++) {
9080 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9081 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9082 if (temp_width * temp_height > max_jpeg_size ) {
9083 max_jpeg_size = temp_width * temp_height;
9084 }
9085 }
9086 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9087 return max_jpeg_size;
9088}
9089
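/* Worked example for the formula above (sensor size is illustrative): a camera
 * whose largest picture size is 4000x3000 has a 12000000-pixel area, so the
 * advertised ANDROID_JPEG_MAX_SIZE is 12000000 * 3 / 2 +
 * sizeof(camera3_jpeg_blob_t) bytes, i.e. the worst-case YUV420 footprint of
 * the largest frame plus the JPEG blob trailer. */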
9090/*===========================================================================
9091 * FUNCTION : getMaxRawSize
9092 *
9093 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9094 *
9095 * PARAMETERS :
9096 * @camera_id : camera Id
9097 * RETURN : Largest supported Raw Dimension
9098 *==========================================================================*/
9099cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9100{
9101 int max_width = 0;
9102 cam_dimension_t maxRawSize;
9103
9104 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9105 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9106 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9107 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9108 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9109 }
9110 }
9111 return maxRawSize;
9112}
9113
9114
9115/*===========================================================================
9116 * FUNCTION : calcMaxJpegDim
9117 *
9118 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9119 *
9120 * PARAMETERS :
9121 *
9122 * RETURN : max_jpeg_dim
9123 *==========================================================================*/
9124cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9125{
9126 cam_dimension_t max_jpeg_dim;
9127 cam_dimension_t curr_jpeg_dim;
9128 max_jpeg_dim.width = 0;
9129 max_jpeg_dim.height = 0;
9130 curr_jpeg_dim.width = 0;
9131 curr_jpeg_dim.height = 0;
9132 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9133 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9134 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9135 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9136 max_jpeg_dim.width * max_jpeg_dim.height ) {
9137 max_jpeg_dim.width = curr_jpeg_dim.width;
9138 max_jpeg_dim.height = curr_jpeg_dim.height;
9139 }
9140 }
9141 return max_jpeg_dim;
9142}
9143
9144/*===========================================================================
9145 * FUNCTION : addStreamConfig
9146 *
9147 * DESCRIPTION: adds the stream configuration to the array
9148 *
9149 * PARAMETERS :
9150 * @available_stream_configs : pointer to stream configuration array
9151 * @scalar_format : scalar format
9152 * @dim : configuration dimension
9153 * @config_type : input or output configuration type
9154 *
9155 * RETURN : NONE
9156 *==========================================================================*/
9157void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9158 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9159{
9160 available_stream_configs.add(scalar_format);
9161 available_stream_configs.add(dim.width);
9162 available_stream_configs.add(dim.height);
9163 available_stream_configs.add(config_type);
9164}
9165
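/* Each addStreamConfig() call appends one (format, width, height, direction)
 * quadruple, so the flattened vector later handed to the framework looks like
 * this (values are illustrative):
 *
 *   { HAL_PIXEL_FORMAT_BLOB, 4000, 3000,
 *     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
 *     HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
 *     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, ... }
 */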
9166/*===========================================================================
9167 * FUNCTION : supportBurstCapture
9168 *
9169 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9170 *
9171 * PARAMETERS :
9172 * @cameraId : camera Id
9173 *
9174 * RETURN : true if camera supports BURST_CAPTURE
9175 * false otherwise
9176 *==========================================================================*/
9177bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9178{
9179 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9180 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9181 const int32_t highResWidth = 3264;
9182 const int32_t highResHeight = 2448;
9183
9184 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9185 // Maximum resolution images cannot be captured at >= 10fps
9186 // -> not supporting BURST_CAPTURE
9187 return false;
9188 }
9189
9190 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9191 // Maximum resolution images can be captured at >= 20fps
9192 // --> supporting BURST_CAPTURE
9193 return true;
9194 }
9195
9196 // Find the smallest highRes resolution, or largest resolution if there is none
9197 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9198 MAX_SIZES_CNT);
9199 size_t highRes = 0;
9200 while ((highRes + 1 < totalCnt) &&
9201 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9202 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9203 highResWidth * highResHeight)) {
9204 highRes++;
9205 }
9206 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9207 return true;
9208 } else {
9209 return false;
9210 }
9211}
9212
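/* Worked example for the bounds above (durations are illustrative): a sensor
 * whose full-resolution minimum frame duration is 33333333 ns (~30 fps) is
 * inside the 50 ms bound and reports BURST_CAPTURE directly; one at
 * 80000000 ns (~12.5 fps) falls through to the search for the smallest size
 * that is still >= 3264x2448 and qualifies only if that size can sustain
 * 20 fps or better. */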
9213/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009214 * FUNCTION : getPDStatIndex
9215 *
9216 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9217 *
9218 * PARAMETERS :
9219 * @caps : camera capabilities
9220 *
9221 * RETURN : int32_t type
9222 * non-negative - on success
9223 * -1 - on failure
9224 *==========================================================================*/
9225int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9226 if (nullptr == caps) {
9227 return -1;
9228 }
9229
9230 uint32_t metaRawCount = caps->meta_raw_channel_count;
9231 int32_t ret = -1;
9232 for (size_t i = 0; i < metaRawCount; i++) {
9233 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9234 ret = i;
9235 break;
9236 }
9237 }
9238
9239 return ret;
9240}
9241
9242/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009243 * FUNCTION : initStaticMetadata
9244 *
9245 * DESCRIPTION: initialize the static metadata
9246 *
9247 * PARAMETERS :
9248 * @cameraId : camera Id
9249 *
9250 * RETURN : int32_t type of status
9251 * 0 -- success
9252 * non-zero failure code
9253 *==========================================================================*/
9254int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9255{
9256 int rc = 0;
9257 CameraMetadata staticInfo;
9258 size_t count = 0;
9259 bool limitedDevice = false;
9260 char prop[PROPERTY_VALUE_MAX];
9261 bool supportBurst = false;
9262
9263 supportBurst = supportBurstCapture(cameraId);
9264
9265 /* If the sensor is a YUV or mono sensor (no raw support), if per-frame
9266 * control is not guaranteed, or if the min fps of the max resolution is
9267 * less than 20 fps, it is advertised as a limited device. */
9268 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9269 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9270 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9271 !supportBurst;
9272
9273 uint8_t supportedHwLvl = limitedDevice ?
9274 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009275#ifndef USE_HAL_3_3
9276 // LEVEL_3 - This device will support level 3.
9277 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9278#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009279 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009280#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009281
9282 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9283 &supportedHwLvl, 1);
9284
9285 bool facingBack = false;
9286 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9287 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9288 facingBack = true;
9289 }
9290 /*HAL 3 only*/
9291 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9292 &gCamCapability[cameraId]->min_focus_distance, 1);
9293
9294 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9295 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9296
9297 /* Should be using the list of focal lengths, but the sensor doesn't provide that info yet. */
9298 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9299 &gCamCapability[cameraId]->focal_length,
9300 1);
9301
9302 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9303 gCamCapability[cameraId]->apertures,
9304 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9305
9306 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9307 gCamCapability[cameraId]->filter_densities,
9308 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9309
9310
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009311 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9312 size_t mode_count =
9313 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9314 for (size_t i = 0; i < mode_count; i++) {
9315 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9316 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009317 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009318 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009319
9320 int32_t lens_shading_map_size[] = {
9321 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9322 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9323 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9324 lens_shading_map_size,
9325 sizeof(lens_shading_map_size)/sizeof(int32_t));
9326
9327 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9328 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9329
9330 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9331 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9332
9333 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9334 &gCamCapability[cameraId]->max_frame_duration, 1);
9335
9336 camera_metadata_rational baseGainFactor = {
9337 gCamCapability[cameraId]->base_gain_factor.numerator,
9338 gCamCapability[cameraId]->base_gain_factor.denominator};
9339 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9340 &baseGainFactor, 1);
9341
9342 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9343 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9344
9345 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9346 gCamCapability[cameraId]->pixel_array_size.height};
9347 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9348 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9349
9350 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9351 gCamCapability[cameraId]->active_array_size.top,
9352 gCamCapability[cameraId]->active_array_size.width,
9353 gCamCapability[cameraId]->active_array_size.height};
9354 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9355 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9356
9357 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9358 &gCamCapability[cameraId]->white_level, 1);
9359
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009360 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9361 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9362 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009363 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009364 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009365
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009366#ifndef USE_HAL_3_3
9367 bool hasBlackRegions = false;
9368 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9369 LOGW("black_region_count: %d is bounded to %d",
9370 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9371 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9372 }
9373 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9374 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9375 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9376 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9377 }
9378 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9379 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9380 hasBlackRegions = true;
9381 }
9382#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9384 &gCamCapability[cameraId]->flash_charge_duration, 1);
9385
9386 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9387 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9388
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009389 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9390 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9391 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9393 &timestampSource, 1);
9394
Thierry Strudel54dc9782017-02-15 12:12:10 -08009395 //update histogram vendor data
9396 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009397 &gCamCapability[cameraId]->histogram_size, 1);
9398
Thierry Strudel54dc9782017-02-15 12:12:10 -08009399 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009400 &gCamCapability[cameraId]->max_histogram_count, 1);
9401
Shuzhen Wang14415f52016-11-16 18:26:18 -08009402 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9403 //so that the app can request fewer bins than the maximum supported.
9404 std::vector<int32_t> histBins;
9405 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9406 histBins.push_back(maxHistBins);
9407 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9408 (maxHistBins & 0x1) == 0) {
9409 histBins.push_back(maxHistBins >> 1);
9410 maxHistBins >>= 1;
9411 }
9412 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9413 histBins.data(), histBins.size());
9414
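/* Worked example (values are illustrative, assuming MIN_CAM_HISTOGRAM_STATS_SIZE
 * is 32): with max_histogram_count = 256 the loop above publishes
 * {256, 128, 64, 32}; halving stops once the next value would drop below the
 * minimum or the current count becomes odd. */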
Thierry Strudel3d639192016-09-09 11:52:26 -07009415 int32_t sharpness_map_size[] = {
9416 gCamCapability[cameraId]->sharpness_map_size.width,
9417 gCamCapability[cameraId]->sharpness_map_size.height};
9418
9419 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9420 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9421
9422 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9423 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9424
Emilian Peev0f3c3162017-03-15 12:57:46 +00009425 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9426 if (0 <= indexPD) {
9427 // Advertise PD stats data as part of the Depth capabilities
9428 int32_t depthWidth =
9429 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9430 int32_t depthHeight =
9431 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009432 int32_t depthStride =
9433 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009434 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9435 assert(0 < depthSamplesCount);
9436 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9437 &depthSamplesCount, 1);
9438
9439 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9440 depthHeight,
9441 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9442 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9443 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9444 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9445 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9446
9447 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9448 depthHeight, 33333333,
9449 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9450 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9451 depthMinDuration,
9452 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9453
9454 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9455 depthHeight, 0,
9456 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9457 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9458 depthStallDuration,
9459 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9460
9461 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9462 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009463
9464 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9465 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9466 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev835938b2017-08-31 16:59:54 +01009467
9468 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9469 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9470 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9471
9472 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9473 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9474 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9475
9476 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9477 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9478 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009479 }
9480
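/* Illustrative reading of the depth entries above: the RAW16 configurations
 * expose the PD stats plane at its native width x height, while the BLOB
 * configuration advertises (depthWidth * depthHeight * 2) / 16 point samples
 * in a single-row depth point cloud; both are published as OUTPUT streams with
 * a 33333333 ns (~30 fps) minimum frame duration and no stall. */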
Thierry Strudel3d639192016-09-09 11:52:26 -07009481 int32_t scalar_formats[] = {
9482 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9483 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9484 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9485 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9486 HAL_PIXEL_FORMAT_RAW10,
9487 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009488 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9489 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9490 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009491
9492 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9493 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9494 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9495 count, MAX_SIZES_CNT, available_processed_sizes);
9496 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9497 available_processed_sizes, count * 2);
9498
9499 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9500 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9501 makeTable(gCamCapability[cameraId]->raw_dim,
9502 count, MAX_SIZES_CNT, available_raw_sizes);
9503 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9504 available_raw_sizes, count * 2);
9505
9506 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9507 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9508 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9509 count, MAX_SIZES_CNT, available_fps_ranges);
9510 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9511 available_fps_ranges, count * 2);
9512
9513 camera_metadata_rational exposureCompensationStep = {
9514 gCamCapability[cameraId]->exp_compensation_step.numerator,
9515 gCamCapability[cameraId]->exp_compensation_step.denominator};
9516 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9517 &exposureCompensationStep, 1);
9518
9519 Vector<uint8_t> availableVstabModes;
9520 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9521 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009522 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009523 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009524 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009525 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009526 count = IS_TYPE_MAX;
9527 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9528 for (size_t i = 0; i < count; i++) {
9529 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9530 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9531 eisSupported = true;
9532 break;
9533 }
9534 }
9535 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009536 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9537 }
9538 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9539 availableVstabModes.array(), availableVstabModes.size());
9540
9541 /*HAL 1 and HAL 3 common*/
9542 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9543 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9544 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009545 // Cap the max zoom to the max preferred value
9546 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009547 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9548 &maxZoom, 1);
9549
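/* Worked example (zoom table values are illustrative): if the last entry of
 * zoom_ratio_tbl is 800, the advertised maximum digital zoom is
 * MIN(800 / 100, MAX_PREFERRED_ZOOM_RATIO). Note that maxZoomStep and
 * minZoomStep are unsigned integers, so the division truncates before the
 * result is stored in the float. */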
9550 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9551 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9552
9553 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9554 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9555 max3aRegions[2] = 0; /* AF not supported */
9556 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9557 max3aRegions, 3);
9558
9559 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9560 memset(prop, 0, sizeof(prop));
9561 property_get("persist.camera.facedetect", prop, "1");
9562 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9563 LOGD("Support face detection mode: %d",
9564 supportedFaceDetectMode);
9565
9566 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009567 /* support mode should be OFF if the max number of faces is 0 */
9568 if (maxFaces <= 0) {
9569 supportedFaceDetectMode = 0;
9570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009571 Vector<uint8_t> availableFaceDetectModes;
9572 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9573 if (supportedFaceDetectMode == 1) {
9574 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9575 } else if (supportedFaceDetectMode == 2) {
9576 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9577 } else if (supportedFaceDetectMode == 3) {
9578 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9579 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9580 } else {
9581 maxFaces = 0;
9582 }
9583 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9584 availableFaceDetectModes.array(),
9585 availableFaceDetectModes.size());
9586 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9587 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009588 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9589 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9590 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009591
9592 int32_t exposureCompensationRange[] = {
9593 gCamCapability[cameraId]->exposure_compensation_min,
9594 gCamCapability[cameraId]->exposure_compensation_max};
9595 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9596 exposureCompensationRange,
9597 sizeof(exposureCompensationRange)/sizeof(int32_t));
9598
9599 uint8_t lensFacing = (facingBack) ?
9600 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9601 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9602
9603 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9604 available_thumbnail_sizes,
9605 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9606
9607 /* All picture sizes will be combined into this tag. */
9608 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9609 /*android.scaler.availableStreamConfigurations*/
9610 Vector<int32_t> available_stream_configs;
9611 cam_dimension_t active_array_dim;
9612 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9613 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009614
9615 /* Advertise the list of supported input dimensions based on the property
9616 below. By default, all sizes up to 5MP will be advertised.
9617 Note that the setprop resolution format should be WxH.
9618 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9619 To list all supported sizes, the setprop needs to be set to "0x0". */
9620 cam_dimension_t minInputSize = {2592,1944}; //5MP
9621 memset(prop, 0, sizeof(prop));
9622 property_get("persist.camera.input.minsize", prop, "2592x1944");
9623 if (strlen(prop) > 0) {
9624 char *saveptr = NULL;
9625 char *token = strtok_r(prop, "x", &saveptr);
9626 if (token != NULL) {
9627 minInputSize.width = atoi(token);
9628 }
9629 token = strtok_r(NULL, "x", &saveptr);
9630 if (token != NULL) {
9631 minInputSize.height = atoi(token);
9632 }
9633 }
9634
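/* Illustrative result of the parsing above: the default property value
 * "2592x1944" keeps the reprocess input threshold at 5MP; a value such as
 * "1920x1080" lowers the threshold, and "0x0" removes it entirely, so any
 * candidate size passes the comparison against minInputSize further below. */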
Thierry Strudel3d639192016-09-09 11:52:26 -07009635 /* Add input/output stream configurations for each scalar formats*/
9636 for (size_t j = 0; j < scalar_formats_count; j++) {
9637 switch (scalar_formats[j]) {
9638 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9639 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9640 case HAL_PIXEL_FORMAT_RAW10:
9641 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9642 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9643 addStreamConfig(available_stream_configs, scalar_formats[j],
9644 gCamCapability[cameraId]->raw_dim[i],
9645 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9646 }
9647 break;
9648 case HAL_PIXEL_FORMAT_BLOB:
9649 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9650 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9651 addStreamConfig(available_stream_configs, scalar_formats[j],
9652 gCamCapability[cameraId]->picture_sizes_tbl[i],
9653 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9654 }
9655 break;
9656 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9657 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9658 default:
9659 cam_dimension_t largest_picture_size;
9660 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9661 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9662 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9663 addStreamConfig(available_stream_configs, scalar_formats[j],
9664 gCamCapability[cameraId]->picture_sizes_tbl[i],
9665 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009666 /* For the 2 formats below we also support input streams for reprocessing; advertise those. */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009667 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9668 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009669 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9670 >= minInputSize.width) || (gCamCapability[cameraId]->
9671 picture_sizes_tbl[i].height >= minInputSize.height)) {
9672 addStreamConfig(available_stream_configs, scalar_formats[j],
9673 gCamCapability[cameraId]->picture_sizes_tbl[i],
9674 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9675 }
9676 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009677 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009678
Thierry Strudel3d639192016-09-09 11:52:26 -07009679 break;
9680 }
9681 }
9682
9683 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9684 available_stream_configs.array(), available_stream_configs.size());
9685 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9686 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9687
9688 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9689 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9690
9691 /* android.scaler.availableMinFrameDurations */
9692 Vector<int64_t> available_min_durations;
9693 for (size_t j = 0; j < scalar_formats_count; j++) {
9694 switch (scalar_formats[j]) {
9695 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9696 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9697 case HAL_PIXEL_FORMAT_RAW10:
9698 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9699 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9700 available_min_durations.add(scalar_formats[j]);
9701 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9702 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9703 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9704 }
9705 break;
9706 default:
9707 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9708 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9709 available_min_durations.add(scalar_formats[j]);
9710 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9711 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9712 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9713 }
9714 break;
9715 }
9716 }
9717 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9718 available_min_durations.array(), available_min_durations.size());
9719
9720 Vector<int32_t> available_hfr_configs;
9721 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9722 int32_t fps = 0;
9723 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9724 case CAM_HFR_MODE_60FPS:
9725 fps = 60;
9726 break;
9727 case CAM_HFR_MODE_90FPS:
9728 fps = 90;
9729 break;
9730 case CAM_HFR_MODE_120FPS:
9731 fps = 120;
9732 break;
9733 case CAM_HFR_MODE_150FPS:
9734 fps = 150;
9735 break;
9736 case CAM_HFR_MODE_180FPS:
9737 fps = 180;
9738 break;
9739 case CAM_HFR_MODE_210FPS:
9740 fps = 210;
9741 break;
9742 case CAM_HFR_MODE_240FPS:
9743 fps = 240;
9744 break;
9745 case CAM_HFR_MODE_480FPS:
9746 fps = 480;
9747 break;
9748 case CAM_HFR_MODE_OFF:
9749 case CAM_HFR_MODE_MAX:
9750 default:
9751 break;
9752 }
9753
9754 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9755 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9756 /* For each HFR frame rate, need to advertise one variable fps range
9757 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9758 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9759 * set by the app. When video recording is started, [120, 120] is
9760 * set. This way sensor configuration does not change when recording
9761 * is started */
9762
9763 /* (width, height, fps_min, fps_max, batch_size_max) */
9764 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9765 j < MAX_SIZES_CNT; j++) {
9766 available_hfr_configs.add(
9767 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9768 available_hfr_configs.add(
9769 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9770 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9771 available_hfr_configs.add(fps);
9772 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9773
9774 /* (width, height, fps_min, fps_max, batch_size_max) */
9775 available_hfr_configs.add(
9776 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9777 available_hfr_configs.add(
9778 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9779 available_hfr_configs.add(fps);
9780 available_hfr_configs.add(fps);
9781 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9782 }
9783 }
9784 }
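/* Illustrative expansion for a 1920x1080 @ 120 fps HFR entry, assuming
 * PREVIEW_FPS_FOR_HFR is 30: the loop above appends two quintuples,
 *
 *   { 1920, 1080,  30, 120, 4 }   // variable range while only preview runs
 *   { 1920, 1080, 120, 120, 4 }   // fixed range once recording starts
 *
 * so the sensor configuration does not change when recording begins. */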
9785 //Advertise HFR capability only if the property is set
9786 memset(prop, 0, sizeof(prop));
9787 property_get("persist.camera.hal3hfr.enable", prop, "1");
9788 uint8_t hfrEnable = (uint8_t)atoi(prop);
9789
9790 if(hfrEnable && available_hfr_configs.array()) {
9791 staticInfo.update(
9792 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9793 available_hfr_configs.array(), available_hfr_configs.size());
9794 }
9795
9796 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9797 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9798 &max_jpeg_size, 1);
9799
9800 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9801 size_t size = 0;
9802 count = CAM_EFFECT_MODE_MAX;
9803 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9804 for (size_t i = 0; i < count; i++) {
9805 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9806 gCamCapability[cameraId]->supported_effects[i]);
9807 if (NAME_NOT_FOUND != val) {
9808 avail_effects[size] = (uint8_t)val;
9809 size++;
9810 }
9811 }
9812 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9813 avail_effects,
9814 size);
9815
9816 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9817 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9818 size_t supported_scene_modes_cnt = 0;
9819 count = CAM_SCENE_MODE_MAX;
9820 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9821 for (size_t i = 0; i < count; i++) {
9822 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9823 CAM_SCENE_MODE_OFF) {
9824 int val = lookupFwkName(SCENE_MODES_MAP,
9825 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9826 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009827
Thierry Strudel3d639192016-09-09 11:52:26 -07009828 if (NAME_NOT_FOUND != val) {
9829 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9830 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9831 supported_scene_modes_cnt++;
9832 }
9833 }
9834 }
9835 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9836 avail_scene_modes,
9837 supported_scene_modes_cnt);
9838
9839 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9840 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9841 supported_scene_modes_cnt,
9842 CAM_SCENE_MODE_MAX,
9843 scene_mode_overrides,
9844 supported_indexes,
9845 cameraId);
9846
9847 if (supported_scene_modes_cnt == 0) {
9848 supported_scene_modes_cnt = 1;
9849 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9850 }
9851
9852 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9853 scene_mode_overrides, supported_scene_modes_cnt * 3);
9854
9855 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9856 ANDROID_CONTROL_MODE_AUTO,
9857 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9858 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9859 available_control_modes,
9860 3);
9861
9862 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9863 size = 0;
9864 count = CAM_ANTIBANDING_MODE_MAX;
9865 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9866 for (size_t i = 0; i < count; i++) {
9867 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9868 gCamCapability[cameraId]->supported_antibandings[i]);
9869 if (NAME_NOT_FOUND != val) {
9870 avail_antibanding_modes[size] = (uint8_t)val;
9871 size++;
9872 }
9873
9874 }
9875 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9876 avail_antibanding_modes,
9877 size);
9878
9879 uint8_t avail_abberation_modes[] = {
9880 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9881 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9882 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9883 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9884 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9885 if (0 == count) {
9886 // If no aberration correction modes are available for a device, advertise only the OFF mode
9887 size = 1;
9888 } else {
9889 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9890 // So, advertise all 3 modes if at least one mode is supported, as per the
9891 // new M requirement.
9892 size = 3;
9893 }
9894 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9895 avail_abberation_modes,
9896 size);
9897
9898 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9899 size = 0;
9900 count = CAM_FOCUS_MODE_MAX;
9901 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9902 for (size_t i = 0; i < count; i++) {
9903 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9904 gCamCapability[cameraId]->supported_focus_modes[i]);
9905 if (NAME_NOT_FOUND != val) {
9906 avail_af_modes[size] = (uint8_t)val;
9907 size++;
9908 }
9909 }
9910 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9911 avail_af_modes,
9912 size);
9913
9914 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9915 size = 0;
9916 count = CAM_WB_MODE_MAX;
9917 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9918 for (size_t i = 0; i < count; i++) {
9919 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9920 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9921 gCamCapability[cameraId]->supported_white_balances[i]);
9922 if (NAME_NOT_FOUND != val) {
9923 avail_awb_modes[size] = (uint8_t)val;
9924 size++;
9925 }
9926 }
9927 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9928 avail_awb_modes,
9929 size);
9930
9931 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9932 count = CAM_FLASH_FIRING_LEVEL_MAX;
9933 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9934 count);
9935 for (size_t i = 0; i < count; i++) {
9936 available_flash_levels[i] =
9937 gCamCapability[cameraId]->supported_firing_levels[i];
9938 }
9939 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9940 available_flash_levels, count);
9941
9942 uint8_t flashAvailable;
9943 if (gCamCapability[cameraId]->flash_available)
9944 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9945 else
9946 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9947 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9948 &flashAvailable, 1);
9949
9950 Vector<uint8_t> avail_ae_modes;
9951 count = CAM_AE_MODE_MAX;
9952 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9953 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009954 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9955 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9956 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9957 }
9958 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009959 }
9960 if (flashAvailable) {
9961 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9962 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9963 }
9964 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9965 avail_ae_modes.array(),
9966 avail_ae_modes.size());
9967
9968 int32_t sensitivity_range[2];
9969 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9970 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9971 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9972 sensitivity_range,
9973 sizeof(sensitivity_range) / sizeof(int32_t));
9974
9975 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9976 &gCamCapability[cameraId]->max_analog_sensitivity,
9977 1);
9978
9979 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9980 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9981 &sensor_orientation,
9982 1);
9983
9984 int32_t max_output_streams[] = {
9985 MAX_STALLING_STREAMS,
9986 MAX_PROCESSED_STREAMS,
9987 MAX_RAW_STREAMS};
9988 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9989 max_output_streams,
9990 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9991
9992 uint8_t avail_leds = 0;
9993 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9994 &avail_leds, 0);
9995
9996 uint8_t focus_dist_calibrated;
9997 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9998 gCamCapability[cameraId]->focus_dist_calibrated);
9999 if (NAME_NOT_FOUND != val) {
10000 focus_dist_calibrated = (uint8_t)val;
10001 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10002 &focus_dist_calibrated, 1);
10003 }
10004
10005 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10006 size = 0;
10007 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10008 MAX_TEST_PATTERN_CNT);
10009 for (size_t i = 0; i < count; i++) {
10010 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10011 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10012 if (NAME_NOT_FOUND != testpatternMode) {
10013 avail_testpattern_modes[size] = testpatternMode;
10014 size++;
10015 }
10016 }
10017 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10018 avail_testpattern_modes,
10019 size);
10020
10021 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10022 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10023 &max_pipeline_depth,
10024 1);
10025
10026 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10027 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10028 &partial_result_count,
10029 1);
10030
10031 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10032 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10033
10034 Vector<uint8_t> available_capabilities;
10035 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10036 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10037 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10038 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10039 if (supportBurst) {
10040 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10041 }
10042 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10043 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10044 if (hfrEnable && available_hfr_configs.array()) {
10045 available_capabilities.add(
10046 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10047 }
10048
10049 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10050 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10051 }
10052 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10053 available_capabilities.array(),
10054 available_capabilities.size());
10055
10056 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10057 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
10058 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10059 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10060
10061 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10062 &aeLockAvailable, 1);
10063
10064 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10065 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10066 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10067 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10068
10069 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10070 &awbLockAvailable, 1);
10071
10072 int32_t max_input_streams = 1;
10073 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10074 &max_input_streams,
10075 1);
10076
10077 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10078 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10079 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10080 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10081 HAL_PIXEL_FORMAT_YCbCr_420_888};
10082 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10083 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10084
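/* Decoded form of io_format_map above: an IMPLEMENTATION_DEFINED input stream
 * can be reprocessed into 2 output formats (BLOB and YCbCr_420_888), and a
 * YCbCr_420_888 input stream likewise into BLOB and YCbCr_420_888. */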
10085 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10086 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10087 &max_latency,
10088 1);
10089
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010090#ifndef USE_HAL_3_3
10091 int32_t isp_sensitivity_range[2];
10092 isp_sensitivity_range[0] =
10093 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10094 isp_sensitivity_range[1] =
10095 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10096 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10097 isp_sensitivity_range,
10098 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10099#endif
10100
Thierry Strudel3d639192016-09-09 11:52:26 -070010101 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10102 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10103 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10104 available_hot_pixel_modes,
10105 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10106
10107 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10108 ANDROID_SHADING_MODE_FAST,
10109 ANDROID_SHADING_MODE_HIGH_QUALITY};
10110 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10111 available_shading_modes,
10112 3);
10113
10114 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10115 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10116 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10117 available_lens_shading_map_modes,
10118 2);
10119
10120 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10121 ANDROID_EDGE_MODE_FAST,
10122 ANDROID_EDGE_MODE_HIGH_QUALITY,
10123 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10124 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10125 available_edge_modes,
10126 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10127
10128 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10129 ANDROID_NOISE_REDUCTION_MODE_FAST,
10130 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10131 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10132 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10133 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10134 available_noise_red_modes,
10135 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10136
10137 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10138 ANDROID_TONEMAP_MODE_FAST,
10139 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10140 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10141 available_tonemap_modes,
10142 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10143
10144 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10145 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10146 available_hot_pixel_map_modes,
10147 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10148
10149 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10150 gCamCapability[cameraId]->reference_illuminant1);
10151 if (NAME_NOT_FOUND != val) {
10152 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10153 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10154 }
10155
10156 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10157 gCamCapability[cameraId]->reference_illuminant2);
10158 if (NAME_NOT_FOUND != val) {
10159 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10160 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10161 }
10162
10163 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10164 (void *)gCamCapability[cameraId]->forward_matrix1,
10165 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10166
10167 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10168 (void *)gCamCapability[cameraId]->forward_matrix2,
10169 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10170
10171 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10172 (void *)gCamCapability[cameraId]->color_transform1,
10173 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10174
10175 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10176 (void *)gCamCapability[cameraId]->color_transform2,
10177 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10178
10179 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10180 (void *)gCamCapability[cameraId]->calibration_transform1,
10181 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10182
10183 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10184 (void *)gCamCapability[cameraId]->calibration_transform2,
10185 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10186
10187 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10188 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10189 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10190 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10191 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10192 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10193 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10194 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10195 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10196 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10197 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10198 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10199 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10200 ANDROID_JPEG_GPS_COORDINATES,
10201 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10202 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10203 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10204 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10205 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10206 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10207 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10208 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10209 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10210 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010211#ifndef USE_HAL_3_3
10212 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10213#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010214 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010215 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010216 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10217 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010218 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010219 /* DevCamDebug metadata request_keys_basic */
10220 DEVCAMDEBUG_META_ENABLE,
10221 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010222 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010223 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010224 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010225 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010226 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010227 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010228
10229 size_t request_keys_cnt =
10230 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10231 Vector<int32_t> available_request_keys;
10232 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10233 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10234 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10235 }
10236
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010237 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010238 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010239 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010240 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010241 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010242 }
10243
Thierry Strudel3d639192016-09-09 11:52:26 -070010244 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10245 available_request_keys.array(), available_request_keys.size());
10246
10247 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10248 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10249 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10250 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10251 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10252 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10253 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10254 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10255 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10256 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10257 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10258 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10259 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10260 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10261 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10262 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10263 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010264 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010265 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10266 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10267 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010268 ANDROID_STATISTICS_FACE_SCORES,
10269#ifndef USE_HAL_3_3
10270 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10271#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010272 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010273 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010274 // DevCamDebug metadata result_keys_basic
10275 DEVCAMDEBUG_META_ENABLE,
10276 // DevCamDebug metadata result_keys AF
10277 DEVCAMDEBUG_AF_LENS_POSITION,
10278 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10279 DEVCAMDEBUG_AF_TOF_DISTANCE,
10280 DEVCAMDEBUG_AF_LUMA,
10281 DEVCAMDEBUG_AF_HAF_STATE,
10282 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10283 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10284 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10285 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10286 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10287 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10288 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10289 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10290 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10291 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10292 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10293 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10294 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10295 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10296 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10297 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10298 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10299 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10300 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10301 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10302 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10303 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10304 // DevCamDebug metadata result_keys AEC
10305 DEVCAMDEBUG_AEC_TARGET_LUMA,
10306 DEVCAMDEBUG_AEC_COMP_LUMA,
10307 DEVCAMDEBUG_AEC_AVG_LUMA,
10308 DEVCAMDEBUG_AEC_CUR_LUMA,
10309 DEVCAMDEBUG_AEC_LINECOUNT,
10310 DEVCAMDEBUG_AEC_REAL_GAIN,
10311 DEVCAMDEBUG_AEC_EXP_INDEX,
10312 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010313 // DevCamDebug metadata result_keys zzHDR
10314 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10315 DEVCAMDEBUG_AEC_L_LINECOUNT,
10316 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10317 DEVCAMDEBUG_AEC_S_LINECOUNT,
10318 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10319 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10320 // DevCamDebug metadata result_keys ADRC
10321 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10322 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10323 DEVCAMDEBUG_AEC_GTM_RATIO,
10324 DEVCAMDEBUG_AEC_LTM_RATIO,
10325 DEVCAMDEBUG_AEC_LA_RATIO,
10326 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010327 // DevCamDebug metadata result_keys AEC MOTION
10328 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10329 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10330 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010331 // DevCamDebug metadata result_keys AWB
10332 DEVCAMDEBUG_AWB_R_GAIN,
10333 DEVCAMDEBUG_AWB_G_GAIN,
10334 DEVCAMDEBUG_AWB_B_GAIN,
10335 DEVCAMDEBUG_AWB_CCT,
10336 DEVCAMDEBUG_AWB_DECISION,
10337 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010338 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10339 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10340 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010341 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010342 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010343 };
10344
Thierry Strudel3d639192016-09-09 11:52:26 -070010345 size_t result_keys_cnt =
10346 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10347
10348 Vector<int32_t> available_result_keys;
10349 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10350 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10351 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10352 }
10353 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10354 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10355 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10356 }
10357 if (supportedFaceDetectMode == 1) {
10358 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10359 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10360 } else if ((supportedFaceDetectMode == 2) ||
10361 (supportedFaceDetectMode == 3)) {
10362 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10363 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10364 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010365#ifndef USE_HAL_3_3
10366 if (hasBlackRegions) {
10367 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10368 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10369 }
10370#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010371
10372 if (gExposeEnableZslKey) {
10373 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010374 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010375 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10376 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010377 }
10378
Thierry Strudel3d639192016-09-09 11:52:26 -070010379 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10380 available_result_keys.array(), available_result_keys.size());
10381
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010382 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010383 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10384 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10385 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10386 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10387 ANDROID_SCALER_CROPPING_TYPE,
10388 ANDROID_SYNC_MAX_LATENCY,
10389 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10390 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10391 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10392 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10393 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10394 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10395 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10396 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10397 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10398 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10399 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10400 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10401 ANDROID_LENS_FACING,
10402 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10403 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10404 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10405 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10406 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10407 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10408 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10409 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10410 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10411 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10412 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10413 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10414 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10415 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10416 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10417 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10418 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10419 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10420 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10421 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010422 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010423 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10424 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10425 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10426 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10427 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10428 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10429 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10430 ANDROID_CONTROL_AVAILABLE_MODES,
10431 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10432 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10433 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10434 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010435 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10436#ifndef USE_HAL_3_3
10437 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10438 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10439#endif
10440 };
10441
10442 Vector<int32_t> available_characteristics_keys;
10443 available_characteristics_keys.appendArray(characteristics_keys_basic,
10444 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10445#ifndef USE_HAL_3_3
10446 if (hasBlackRegions) {
10447 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10448 }
10449#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010450
10451 if (0 <= indexPD) {
10452 int32_t depthKeys[] = {
10453 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10454 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10455 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10456 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10457 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10458 };
10459 available_characteristics_keys.appendArray(depthKeys,
10460 sizeof(depthKeys) / sizeof(depthKeys[0]));
10461 }
10462
Thierry Strudel3d639192016-09-09 11:52:26 -070010463 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010464 available_characteristics_keys.array(),
10465 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010466
10467 /* Available stall durations depend on the HW + SW combination and will differ across devices. */
10468 /* RAW stall durations still have to be added once implemented. */
10469 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10470 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10471
10472 Vector<int64_t> available_stall_durations;
10473 for (uint32_t j = 0; j < stall_formats_count; j++) {
10474 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10475 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10476 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10477 available_stall_durations.add(stall_formats[j]);
10478 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10479 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10480 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10481 }
10482 } else {
10483 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10484 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10485 available_stall_durations.add(stall_formats[j]);
10486 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10487 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10488 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10489 }
10490 }
10491 }
10492 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10493 available_stall_durations.array(),
10494 available_stall_durations.size());
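    /* Illustrative note (hypothetical numbers, not taken from the capability data):
     * the stall-duration entries above are packed as 4-tuples of
     * (format, width, height, stall_duration_ns). For example, a 4032x3024 JPEG
     * size with a 300 ms stall would contribute
     *   { HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 300000000 }
     * and ANDROID_SCALER_AVAILABLE_STALL_DURATIONS is parsed by the framework in
     * that layout. */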
10495
10496 //QCAMERA3_OPAQUE_RAW
10497 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10498 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10499 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10500 case LEGACY_RAW:
10501 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10502 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10503 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10504 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10505 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10506 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10507 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10508 break;
10509 case MIPI_RAW:
10510 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10511 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10512 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10513 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10514 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10515 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10516 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10517 break;
10518 default:
10519 LOGE("unknown opaque_raw_format %d",
10520 gCamCapability[cameraId]->opaque_raw_fmt);
10521 break;
10522 }
10523 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10524
10525 Vector<int32_t> strides;
10526 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10527 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10528 cam_stream_buf_plane_info_t buf_planes;
10529 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10530 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10531 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10532 &gCamCapability[cameraId]->padding_info, &buf_planes);
10533 strides.add(buf_planes.plane_info.mp[0].stride);
10534 }
10535 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10536 strides.size());
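    /* Illustrative note (hypothetical numbers): QCAMERA3_OPAQUE_RAW_STRIDES is
     * packed as 3-tuples of (width, height, stride) per supported RAW dimension,
     * with the stride taken from mm_stream_calc_offset_raw() for the chosen
     * opaque RAW format. A 4032-wide sensor with 10bpp MIPI packing might report
     * a stride of 5040 bytes, i.e. { 4032, 3024, 5040 }. */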
10537
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010538 //TBD: remove the following line once backend advertises zzHDR in feature mask
10539 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010540 //Video HDR default
10541 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10542 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010543 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010544 int32_t vhdr_mode[] = {
10545 QCAMERA3_VIDEO_HDR_MODE_OFF,
10546 QCAMERA3_VIDEO_HDR_MODE_ON};
10547
10548 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10549 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10550 vhdr_mode, vhdr_mode_count);
10551 }
10552
Thierry Strudel3d639192016-09-09 11:52:26 -070010553 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10554 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10555 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10556
10557 uint8_t isMonoOnly =
10558 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10559 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10560 &isMonoOnly, 1);
10561
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010562#ifndef USE_HAL_3_3
10563 Vector<int32_t> opaque_size;
10564 for (size_t j = 0; j < scalar_formats_count; j++) {
10565 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10566 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10567 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10568 cam_stream_buf_plane_info_t buf_planes;
10569
10570 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10571 &gCamCapability[cameraId]->padding_info, &buf_planes);
10572
10573 if (rc == 0) {
10574 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10575 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10576 opaque_size.add(buf_planes.plane_info.frame_len);
10577 } else {
10578 LOGE("raw frame calculation failed!");
10579 }
10580 }
10581 }
10582 }
10583
10584 if ((opaque_size.size() > 0) &&
10585 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10586 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10587 else
10588 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10589#endif
10590
Thierry Strudel04e026f2016-10-10 11:27:36 -070010591 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10592 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10593 size = 0;
10594 count = CAM_IR_MODE_MAX;
10595 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10596 for (size_t i = 0; i < count; i++) {
10597 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10598 gCamCapability[cameraId]->supported_ir_modes[i]);
10599 if (NAME_NOT_FOUND != val) {
10600 avail_ir_modes[size] = (int32_t)val;
10601 size++;
10602 }
10603 }
10604 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10605 avail_ir_modes, size);
10606 }
10607
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010608 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10609 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10610 size = 0;
10611 count = CAM_AEC_CONVERGENCE_MAX;
10612 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10613 for (size_t i = 0; i < count; i++) {
10614 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10615 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10616 if (NAME_NOT_FOUND != val) {
10617 available_instant_aec_modes[size] = (int32_t)val;
10618 size++;
10619 }
10620 }
10621 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10622 available_instant_aec_modes, size);
10623 }
10624
Thierry Strudel54dc9782017-02-15 12:12:10 -080010625 int32_t sharpness_range[] = {
10626 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10627 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10628 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10629
10630 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10631 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10632 size = 0;
10633 count = CAM_BINNING_CORRECTION_MODE_MAX;
10634 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10635 for (size_t i = 0; i < count; i++) {
10636 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10637 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10638 gCamCapability[cameraId]->supported_binning_modes[i]);
10639 if (NAME_NOT_FOUND != val) {
10640 avail_binning_modes[size] = (int32_t)val;
10641 size++;
10642 }
10643 }
10644 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10645 avail_binning_modes, size);
10646 }
10647
10648 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10649 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10650 size = 0;
10651 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10652 for (size_t i = 0; i < count; i++) {
10653 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10654 gCamCapability[cameraId]->supported_aec_modes[i]);
10655 if (NAME_NOT_FOUND != val)
10656 available_aec_modes[size++] = val;
10657 }
10658 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10659 available_aec_modes, size);
10660 }
10661
10662 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10663 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10664 size = 0;
10665 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10666 for (size_t i = 0; i < count; i++) {
10667 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10668 gCamCapability[cameraId]->supported_iso_modes[i]);
10669 if (NAME_NOT_FOUND != val)
10670 available_iso_modes[size++] = val;
10671 }
10672 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10673 available_iso_modes, size);
10674 }
10675
10676 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010677 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010678 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10679 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10680 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10681
10682 int32_t available_saturation_range[4];
10683 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10684 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10685 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10686 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10687 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10688 available_saturation_range, 4);
10689
10690 uint8_t is_hdr_values[2];
10691 is_hdr_values[0] = 0;
10692 is_hdr_values[1] = 1;
10693 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10694 is_hdr_values, 2);
10695
10696 float is_hdr_confidence_range[2];
10697 is_hdr_confidence_range[0] = 0.0;
10698 is_hdr_confidence_range[1] = 1.0;
10699 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10700 is_hdr_confidence_range, 2);
10701
Emilian Peev0a972ef2017-03-16 10:25:53 +000010702 size_t eepromLength = strnlen(
10703 reinterpret_cast<const char *>(
10704 gCamCapability[cameraId]->eeprom_version_info),
10705 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10706 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010707 char easelInfo[] = ",E:N";
10708 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10709 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10710 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010711 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010712 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010713 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010714 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010715 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10716 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10717 }
10718
Thierry Strudel3d639192016-09-09 11:52:26 -070010719 gStaticMetadata[cameraId] = staticInfo.release();
10720 return rc;
10721}
10722
10723/*===========================================================================
10724 * FUNCTION : makeTable
10725 *
10726 * DESCRIPTION: make a table of sizes
10727 *
10728 * PARAMETERS :
10729 *   @dimTable / @size : source cam_dimension_t array and its valid entry count
10730 *   @max_size / @sizeTable : output capacity and the flattened (width, height) pair array
10731 *==========================================================================*/
10732void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10733 size_t max_size, int32_t *sizeTable)
10734{
10735 size_t j = 0;
10736 if (size > max_size) {
10737 size = max_size;
10738 }
10739 for (size_t i = 0; i < size; i++) {
10740 sizeTable[j] = dimTable[i].width;
10741 sizeTable[j+1] = dimTable[i].height;
10742 j+=2;
10743 }
10744}
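/* Usage sketch (illustrative only): given dimTable = {{640,480},{1280,720}} and
 * size = 2, makeTable() flattens the dimensions into
 *   sizeTable = {640, 480, 1280, 720}
 * i.e. (width, height) pairs, truncated to max_size entries. */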
10745
10746/*===========================================================================
10747 * FUNCTION : makeFPSTable
10748 *
10749 * DESCRIPTION: make a table of fps ranges
10750 *
10751 * PARAMETERS :
10752 *
10753 *==========================================================================*/
10754void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10755 size_t max_size, int32_t *fpsRangesTable)
10756{
10757 size_t j = 0;
10758 if (size > max_size) {
10759 size = max_size;
10760 }
10761 for (size_t i = 0; i < size; i++) {
10762 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10763 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10764 j+=2;
10765 }
10766}
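/* Usage sketch (illustrative only): an fpsTable of {{15.0,30.0},{30.0,30.0}} is
 * flattened into fpsRangesTable = {15, 30, 30, 30}, i.e. (min_fps, max_fps)
 * pairs cast to int32_t, truncated to max_size entries. */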
10767
10768/*===========================================================================
10769 * FUNCTION : makeOverridesList
10770 *
10771 * DESCRIPTION: make a list of scene mode overrides
10772 *
10773 * PARAMETERS :
10774 *
10775 *
10776 *==========================================================================*/
10777void QCamera3HardwareInterface::makeOverridesList(
10778 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10779 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10780{
10781 /* The daemon provides a list of overrides for all scene modes.
10782 However, only the overrides for the scene modes supported by the
10783 framework are sent to the fwk. */
10784 size_t j = 0;
10785 if (size > max_size) {
10786 size = max_size;
10787 }
10788 size_t focus_count = CAM_FOCUS_MODE_MAX;
10789 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10790 focus_count);
10791 for (size_t i = 0; i < size; i++) {
10792 bool supt = false;
10793 size_t index = supported_indexes[i];
10794 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10795 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10796 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10797 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10798 overridesTable[index].awb_mode);
10799 if (NAME_NOT_FOUND != val) {
10800 overridesList[j+1] = (uint8_t)val;
10801 }
10802 uint8_t focus_override = overridesTable[index].af_mode;
10803 for (size_t k = 0; k < focus_count; k++) {
10804 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10805 supt = true;
10806 break;
10807 }
10808 }
10809 if (supt) {
10810 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10811 focus_override);
10812 if (NAME_NOT_FOUND != val) {
10813 overridesList[j+2] = (uint8_t)val;
10814 }
10815 } else {
10816 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10817 }
10818 j+=3;
10819 }
10820}
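/* Usage sketch (illustrative only): each framework-supported scene mode
 * contributes one 3-tuple (ae_override, awb_override, af_override) to
 * overridesList. For a hypothetical flash-capable camera whose table maps a
 * scene mode to auto white balance and a supported continuous-picture AF mode,
 * the triple would be:
 *   { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 *     ANDROID_CONTROL_AWB_MODE_AUTO,
 *     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }
 * Unsupported AF overrides fall back to ANDROID_CONTROL_AF_MODE_OFF. */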
10821
10822/*===========================================================================
10823 * FUNCTION : filterJpegSizes
10824 *
10825 * DESCRIPTION: Filters the processed sizes down to the supported JPEG sizes,
10826 * i.e. those no smaller than the active array divided by the max downscale factor
10827 *
10828 * PARAMETERS :
10829 *
10830 * RETURN : length of jpegSizes array
10831 *==========================================================================*/
10832
10833size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10834 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10835 uint8_t downscale_factor)
10836{
10837 if (0 == downscale_factor) {
10838 downscale_factor = 1;
10839 }
10840
10841 int32_t min_width = active_array_size.width / downscale_factor;
10842 int32_t min_height = active_array_size.height / downscale_factor;
10843 size_t jpegSizesCnt = 0;
10844 if (processedSizesCnt > maxCount) {
10845 processedSizesCnt = maxCount;
10846 }
10847 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10848 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10849 jpegSizes[jpegSizesCnt] = processedSizes[i];
10850 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10851 jpegSizesCnt += 2;
10852 }
10853 }
10854 return jpegSizesCnt;
10855}
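/* Usage sketch (hypothetical numbers): with an active array of 4032x3024 and
 * downscale_factor = 4, the cutoff is 1008x756, so a processed size of
 * 1920x1080 is kept while 640x480 is dropped. jpegSizes receives the surviving
 * (width, height) pairs and the return value is the number of int32_t entries
 * written (two per size). */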
10856
10857/*===========================================================================
10858 * FUNCTION : computeNoiseModelEntryS
10859 *
10860 * DESCRIPTION: function to map a given sensitivity to the S noise
10861 * model parameters in the DNG noise model.
10862 *
10863 * PARAMETERS : sens : the sensor sensitivity
10864 *
10865 * RETURN : S (sensor amplification) noise
10866 *
10867 *==========================================================================*/
10868double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10869 double s = gCamCapability[mCameraId]->gradient_S * sens +
10870 gCamCapability[mCameraId]->offset_S;
10871 return ((s < 0.0) ? 0.0 : s);
10872}
10873
10874/*===========================================================================
10875 * FUNCTION : computeNoiseModelEntryO
10876 *
10877 * DESCRIPTION: function to map a given sensitivity to the O noise
10878 * model parameters in the DNG noise model.
10879 *
10880 * PARAMETERS : sens : the sensor sensitivity
10881 *
10882 * RETURN : O (sensor readout) noise
10883 *
10884 *==========================================================================*/
10885double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10886 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10887 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10888 1.0 : (1.0 * sens / max_analog_sens);
10889 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10890 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10891 return ((o < 0.0) ? 0.0 : o);
10892}
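/* Illustrative note (hypothetical tuning values): computeNoiseModelEntryS/O feed
 * the DNG noise model, where the noise variance at normalized signal level x is
 * approximately N(x) = S * x + O. With gradient_S = 3.0e-06 and
 * offset_S = 4.0e-08, a sensitivity of 100 gives S = 3.0e-06 * 100 + 4.0e-08,
 * roughly 3.0e-04. The offset_O term of O is additionally scaled by the square
 * of the digital gain applied beyond max_analog_sensitivity. */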
10893
10894/*===========================================================================
10895 * FUNCTION : getSensorSensitivity
10896 *
10897 * DESCRIPTION: convert iso_mode to an integer value
10898 *
10899 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10900 *
10901 * RETURN : sensitivity supported by sensor
10902 *
10903 *==========================================================================*/
10904int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10905{
10906 int32_t sensitivity;
10907
10908 switch (iso_mode) {
10909 case CAM_ISO_MODE_100:
10910 sensitivity = 100;
10911 break;
10912 case CAM_ISO_MODE_200:
10913 sensitivity = 200;
10914 break;
10915 case CAM_ISO_MODE_400:
10916 sensitivity = 400;
10917 break;
10918 case CAM_ISO_MODE_800:
10919 sensitivity = 800;
10920 break;
10921 case CAM_ISO_MODE_1600:
10922 sensitivity = 1600;
10923 break;
10924 default:
10925 sensitivity = -1;
10926 break;
10927 }
10928 return sensitivity;
10929}
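/* Usage sketch: getSensorSensitivity(CAM_ISO_MODE_400) returns 400; any other
 * iso_mode value falls through to the default case and returns -1 so callers
 * can skip it. */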
10930
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010931int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010932 if (gEaselManagerClient == nullptr) {
10933 gEaselManagerClient = EaselManagerClient::create();
10934 if (gEaselManagerClient == nullptr) {
10935 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10936 return -ENODEV;
10937 }
10938 }
10939
10940 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010941 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10942 // to connect to Easel.
10943 bool doNotPowerOnEasel =
10944 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10945
10946 if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010947 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10948 return OK;
10949 }
10950
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010951 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010952 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010953 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010954 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010955 return res;
10956 }
10957
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010958 EaselManagerClientOpened = true;
10959
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010960 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010961 if (res != OK) {
10962 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10963 }
10964
Chien-Yu Chen4d752e32017-06-07 12:13:24 -070010965 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010966 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010967
10968 // Expose enableZsl key only when HDR+ mode is enabled.
10969 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010970 }
10971
10972 return OK;
10973}
10974
Thierry Strudel3d639192016-09-09 11:52:26 -070010975/*===========================================================================
10976 * FUNCTION : getCamInfo
10977 *
10978 * DESCRIPTION: query camera capabilities
10979 *
10980 * PARAMETERS :
10981 * @cameraId : camera Id
10982 * @info : camera info struct to be filled in with camera capabilities
10983 *
10984 * RETURN : int type of status
10985 * NO_ERROR -- success
10986 * non-zero failure code
10987 *==========================================================================*/
10988int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10989 struct camera_info *info)
10990{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010991 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010992 int rc = 0;
10993
10994 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010995
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010996 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010997 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010998 rc = initHdrPlusClientLocked();
10999 if (rc != OK) {
11000 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11001 pthread_mutex_unlock(&gCamLock);
11002 return rc;
11003 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011004 }
11005
Thierry Strudel3d639192016-09-09 11:52:26 -070011006 if (NULL == gCamCapability[cameraId]) {
11007 rc = initCapabilities(cameraId);
11008 if (rc < 0) {
11009 pthread_mutex_unlock(&gCamLock);
11010 return rc;
11011 }
11012 }
11013
11014 if (NULL == gStaticMetadata[cameraId]) {
11015 rc = initStaticMetadata(cameraId);
11016 if (rc < 0) {
11017 pthread_mutex_unlock(&gCamLock);
11018 return rc;
11019 }
11020 }
11021
11022 switch(gCamCapability[cameraId]->position) {
11023 case CAM_POSITION_BACK:
11024 case CAM_POSITION_BACK_AUX:
11025 info->facing = CAMERA_FACING_BACK;
11026 break;
11027
11028 case CAM_POSITION_FRONT:
11029 case CAM_POSITION_FRONT_AUX:
11030 info->facing = CAMERA_FACING_FRONT;
11031 break;
11032
11033 default:
11034 LOGE("Unknown position type %d for camera id:%d",
11035 gCamCapability[cameraId]->position, cameraId);
11036 rc = -1;
11037 break;
11038 }
11039
11040
11041 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011042#ifndef USE_HAL_3_3
11043 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11044#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011045 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011046#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011047 info->static_camera_characteristics = gStaticMetadata[cameraId];
11048
11049 //For now assume both cameras can operate independently.
11050 info->conflicting_devices = NULL;
11051 info->conflicting_devices_length = 0;
11052
11053 //resource cost is 100 * MIN(1.0, m/M),
11054 //where m is throughput requirement with maximum stream configuration
11055 //and M is CPP maximum throughput.
11056 float max_fps = 0.0;
11057 for (uint32_t i = 0;
11058 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11059 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11060 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11061 }
11062 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11063 gCamCapability[cameraId]->active_array_size.width *
11064 gCamCapability[cameraId]->active_array_size.height * max_fps /
11065 gCamCapability[cameraId]->max_pixel_bandwidth;
11066 info->resource_cost = 100 * MIN(1.0, ratio);
11067 LOGI("camera %d resource cost is %d", cameraId,
11068 info->resource_cost);
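    /* Worked example (hypothetical numbers, not taken from the capability data):
     * with MAX_PROCESSED_STREAMS = 3, a 4000x3000 active array, max_fps = 30 and
     * max_pixel_bandwidth = 1.2e9 pixels/s, the ratio m/M is
     *   3 * 4000 * 3000 * 30 / 1.2e9 = 0.9
     * so resource_cost = 100 * MIN(1.0, 0.9) = 90. */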
11069
11070 pthread_mutex_unlock(&gCamLock);
11071 return rc;
11072}
11073
11074/*===========================================================================
11075 * FUNCTION : translateCapabilityToMetadata
11076 *
11077 * DESCRIPTION: translate the capability into camera_metadata_t
11078 *
11079 * PARAMETERS : type of the request
11080 *
11081 *
11082 * RETURN : success: camera_metadata_t*
11083 * failure: NULL
11084 *
11085 *==========================================================================*/
11086camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11087{
11088 if (mDefaultMetadata[type] != NULL) {
11089 return mDefaultMetadata[type];
11090 }
11091 //first time we are handling this request
11092 //fill up the metadata structure using the wrapper class
11093 CameraMetadata settings;
11094 //translate from cam_capability_t to camera_metadata_tag_t
11095 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11096 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11097 int32_t defaultRequestID = 0;
11098 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11099
11100 /* OIS disable */
11101 char ois_prop[PROPERTY_VALUE_MAX];
11102 memset(ois_prop, 0, sizeof(ois_prop));
11103 property_get("persist.camera.ois.disable", ois_prop, "0");
11104 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11105
11106 /* Force video to use OIS */
11107 char videoOisProp[PROPERTY_VALUE_MAX];
11108 memset(videoOisProp, 0, sizeof(videoOisProp));
11109 property_get("persist.camera.ois.video", videoOisProp, "1");
11110 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011111
11112 // Hybrid AE enable/disable
11113 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11114 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11115 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011116 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011117
Thierry Strudel3d639192016-09-09 11:52:26 -070011118 uint8_t controlIntent = 0;
11119 uint8_t focusMode;
11120 uint8_t vsMode;
11121 uint8_t optStabMode;
11122 uint8_t cacMode;
11123 uint8_t edge_mode;
11124 uint8_t noise_red_mode;
11125 uint8_t tonemap_mode;
11126 bool highQualityModeEntryAvailable = FALSE;
11127 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011128 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011129 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11130 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011131 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011132 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011133 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011134
Thierry Strudel3d639192016-09-09 11:52:26 -070011135 switch (type) {
11136 case CAMERA3_TEMPLATE_PREVIEW:
11137 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11138 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11139 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11140 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11141 edge_mode = ANDROID_EDGE_MODE_FAST;
11142 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11143 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11144 break;
11145 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11146 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11147 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11148 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11149 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11150 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11151 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11152 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11153 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11154 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11155 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11156 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11157 highQualityModeEntryAvailable = TRUE;
11158 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11159 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11160 fastModeEntryAvailable = TRUE;
11161 }
11162 }
11163 if (highQualityModeEntryAvailable) {
11164 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11165 } else if (fastModeEntryAvailable) {
11166 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11167 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011168 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11169 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11170 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011171 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011172 break;
11173 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11174 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11175 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11176 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011177 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11178 edge_mode = ANDROID_EDGE_MODE_FAST;
11179 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11180 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11181 if (forceVideoOis)
11182 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11183 break;
11184 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11185 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11186 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11187 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011188 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11189 edge_mode = ANDROID_EDGE_MODE_FAST;
11190 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11191 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11192 if (forceVideoOis)
11193 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11194 break;
11195 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11196 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11197 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11198 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11199 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11200 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11201 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11202 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11203 break;
11204 case CAMERA3_TEMPLATE_MANUAL:
11205 edge_mode = ANDROID_EDGE_MODE_FAST;
11206 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11207 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11208 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11209 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11210 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11211 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11212 break;
11213 default:
11214 edge_mode = ANDROID_EDGE_MODE_FAST;
11215 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11216 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11217 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11218 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11219 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11220 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11221 break;
11222 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011223 // Set CAC to OFF if underlying device doesn't support
11224 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11225 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11226 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011227 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11228 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11229 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11230 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11231 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11232 }
11233 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011234 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011235 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011236
11237 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11238 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11239 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11240 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11241 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11242 || ois_disable)
11243 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11244 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011245 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011246
11247 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11248 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11249
11250 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11251 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11252
11253 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11254 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11255
11256 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11257 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11258
11259 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11260 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11261
11262 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11263 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11264
11265 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11266 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11267
11268 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11269 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11270
11271 /*flash*/
11272 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11273 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11274
11275 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11276 settings.update(ANDROID_FLASH_FIRING_POWER,
11277 &flashFiringLevel, 1);
11278
11279 /* lens */
11280 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11281 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11282
11283 if (gCamCapability[mCameraId]->filter_densities_count) {
11284 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11285 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11286 gCamCapability[mCameraId]->filter_densities_count);
11287 }
11288
11289 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11290 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11291
Thierry Strudel3d639192016-09-09 11:52:26 -070011292 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11293 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11294
11295 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11296 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11297
11298 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11299 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11300
11301 /* face detection (default to OFF) */
11302 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11303 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11304
Thierry Strudel54dc9782017-02-15 12:12:10 -080011305 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11306 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011307
11308 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11309 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11310
11311 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11312 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11313
Thierry Strudel3d639192016-09-09 11:52:26 -070011314
11315 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11316 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11317
11318 /* Exposure time (update to the min exposure time) */
11319 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11320 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11321
11322 /* frame duration */
11323 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11324 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11325
11326 /* sensitivity */
11327 static const int32_t default_sensitivity = 100;
11328 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011329#ifndef USE_HAL_3_3
11330 static const int32_t default_isp_sensitivity =
11331 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11332 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11333#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011334
11335 /*edge mode*/
11336 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11337
11338 /*noise reduction mode*/
11339 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11340
11341 /*color correction mode*/
11342 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11343 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11344
11345 /*transform matrix mode*/
11346 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11347
11348 int32_t scaler_crop_region[4];
11349 scaler_crop_region[0] = 0;
11350 scaler_crop_region[1] = 0;
11351 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11352 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11353 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11354
11355 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11356 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11357
11358 /*focus distance*/
11359 float focus_distance = 0.0;
11360 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11361
11362 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011363 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011364 float max_range = 0.0;
11365 float max_fixed_fps = 0.0;
11366 int32_t fps_range[2] = {0, 0};
11367 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11368 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011369 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11370 TEMPLATE_MAX_PREVIEW_FPS) {
11371 continue;
11372 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011373 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11374 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11375 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11376 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11377 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11378 if (range > max_range) {
11379 fps_range[0] =
11380 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11381 fps_range[1] =
11382 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11383 max_range = range;
11384 }
11385 } else {
11386 if (range < 0.01 && max_fixed_fps <
11387 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11388 fps_range[0] =
11389 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11390 fps_range[1] =
11391 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11392 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11393 }
11394 }
11395 }
11396 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
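    /* Illustrative note (hypothetical fps table): given ranges {[15,30], [30,30],
     * [7.5,60]}, the [7.5,60] entry is skipped because its max exceeds
     * TEMPLATE_MAX_PREVIEW_FPS; preview/still/ZSL templates then pick the widest
     * remaining range, [15,30], while the other templates pick the highest fixed
     * range, [30,30]. */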
11397
11398 /*precapture trigger*/
11399 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11400 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11401
11402 /*af trigger*/
11403 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11404 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11405
11406 /* ae & af regions */
11407 int32_t active_region[] = {
11408 gCamCapability[mCameraId]->active_array_size.left,
11409 gCamCapability[mCameraId]->active_array_size.top,
11410 gCamCapability[mCameraId]->active_array_size.left +
11411 gCamCapability[mCameraId]->active_array_size.width,
11412 gCamCapability[mCameraId]->active_array_size.top +
11413 gCamCapability[mCameraId]->active_array_size.height,
11414 0};
11415 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11416 sizeof(active_region) / sizeof(active_region[0]));
11417 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11418 sizeof(active_region) / sizeof(active_region[0]));
11419
11420 /* black level lock */
11421 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11422 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11423
Thierry Strudel3d639192016-09-09 11:52:26 -070011424 //special defaults for manual template
11425 if (type == CAMERA3_TEMPLATE_MANUAL) {
11426 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11427 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11428
11429 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11430 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11431
11432 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11433 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11434
11435 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11436 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11437
11438 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11439 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11440
11441 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11442 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11443 }
11444
11445
11446 /* TNR
11447     * We use this location to determine for which templates TNR will be set.
11448     * TNR is enabled if either the Preview or the Video stream requires it.
11449     * This is not to be confused with per-stream linking; that decision is
11450     * still made per session and is handled as part of stream configuration.
11451 */
11452 uint8_t tnr_enable = 0;
11453
11454 if (m_bTnrPreview || m_bTnrVideo) {
11455
11456 switch (type) {
11457 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11458 tnr_enable = 1;
11459 break;
11460
11461 default:
11462 tnr_enable = 0;
11463 break;
11464 }
11465
11466 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11467 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11468 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11469
11470 LOGD("TNR:%d with process plate %d for template:%d",
11471 tnr_enable, tnr_process_type, type);
11472 }
11473
11474 //Update Link tags to default
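    //(standalone camera: link disabled, treated as main, related camera id is itself)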
Shuzhen Wang920ea402017-05-03 08:49:39 -070011475 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011476 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11477
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011478 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011479 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11480
Shuzhen Wang920ea402017-05-03 08:49:39 -070011481 uint8_t related_camera_id = mCameraId;
11482 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011483
11484 /* CDS default */
11485 char prop[PROPERTY_VALUE_MAX];
11486 memset(prop, 0, sizeof(prop));
11487 property_get("persist.camera.CDS", prop, "Auto");
11488 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11489 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
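    // Unrecognized persist.camera.CDS values fall back to CAM_CDS_MODE_AUTO below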
11490 if (CAM_CDS_MODE_MAX == cds_mode) {
11491 cds_mode = CAM_CDS_MODE_AUTO;
11492 }
11493
11494 /* Disabling CDS in templates which have TNR enabled*/
11495 if (tnr_enable)
11496 cds_mode = CAM_CDS_MODE_OFF;
11497
11498 int32_t mode = cds_mode;
11499 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011500
Thierry Strudel269c81a2016-10-12 12:13:59 -070011501 /* Manual Convergence AEC Speed is disabled by default*/
11502 float default_aec_speed = 0;
11503 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11504
11505 /* Manual Convergence AWB Speed is disabled by default*/
11506 float default_awb_speed = 0;
11507 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11508
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011509 // Set instant AEC to normal convergence by default
11510 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11511 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11512
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011513 if (gExposeEnableZslKey) {
11514 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011515 int32_t postview = 0;
11516 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011517 int32_t continuousZslCapture = 0;
11518 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011519 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE.
11520 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE) ? 0 : 1;
11521 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11522
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011523 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11524 // hybrid ae is enabled for 3rd party app HDR+.
11525 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11526 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11527 hybrid_ae = 1;
11528 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011529 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011530 /* hybrid ae */
11531 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011532
Thierry Strudel3d639192016-09-09 11:52:26 -070011533 mDefaultMetadata[type] = settings.release();
11534
11535 return mDefaultMetadata[type];
11536}
11537
11538/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011539 * FUNCTION : getExpectedFrameDuration
11540 *
11541 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11542 * duration
11543 *
11544 * PARAMETERS :
11545 * @request : request settings
11546 * @frameDuration : The maximum frame duration in nanoseconds
11547 *
11548 * RETURN : None
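 *
 * NOTE       : The result is the larger of the requested exposure time and
 *              frame duration; e.g. an exposure of 50 ms with a frame duration
 *              of 33 ms yields an expected duration of 50 ms.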
11549 *==========================================================================*/
11550void QCamera3HardwareInterface::getExpectedFrameDuration(
11551 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11552 if (nullptr == frameDuration) {
11553 return;
11554 }
11555
11556 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11557 find_camera_metadata_ro_entry(request,
11558 ANDROID_SENSOR_EXPOSURE_TIME,
11559 &e);
11560 if (e.count > 0) {
11561 *frameDuration = e.data.i64[0];
11562 }
11563 find_camera_metadata_ro_entry(request,
11564 ANDROID_SENSOR_FRAME_DURATION,
11565 &e);
11566 if (e.count > 0) {
11567 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11568 }
11569}
11570
11571/*===========================================================================
11572 * FUNCTION : calculateMaxExpectedDuration
11573 *
11574 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11575 * current camera settings.
11576 *
11577 * PARAMETERS :
11578 * @request : request settings
11579 *
11580 * RETURN : Expected frame duration in nanoseconds.
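 *
 * NOTE       : Returns kDefaultExpectedDuration when the request does not
 *              constrain it; with 3A or AE off the manual exposure/frame
 *              duration is used, and under auto exposure the AE target FPS
 *              range bounds the duration.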
11581 *==========================================================================*/
11582nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11583 const camera_metadata_t *request) {
11584 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11585 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11586 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11587 if (e.count == 0) {
11588 return maxExpectedDuration;
11589 }
11590
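    // With 3A fully off the duration comes from the manual sensor settings; any
    // mode other than AUTO (OFF included) returns here without consulting AE.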
11591 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11592 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11593 }
11594
11595 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11596 return maxExpectedDuration;
11597 }
11598
11599 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11600 if (e.count == 0) {
11601 return maxExpectedDuration;
11602 }
11603
11604 switch (e.data.u8[0]) {
11605 case ANDROID_CONTROL_AE_MODE_OFF:
11606 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11607 break;
11608 default:
11609 find_camera_metadata_ro_entry(request,
11610 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11611 &e);
11612 if (e.count > 1) {
11613                maxExpectedDuration = 1e9 / e.data.i32[0]; // int32 data; min fps gives max duration
11614 }
11615 break;
11616 }
11617
11618 return maxExpectedDuration;
11619}
11620
11621/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011622 * FUNCTION : setFrameParameters
11623 *
11624 * DESCRIPTION: set parameters per frame as requested in the metadata from
11625 * framework
11626 *
11627 * PARAMETERS :
11628 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011629 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011630 * @blob_request: Whether this request is a blob request or not
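 *   @snapshotStreamId : stream ID of the snapshot stream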
11631 *
11632 * RETURN : success: NO_ERROR
11633 * failure:
11634 *==========================================================================*/
11635int QCamera3HardwareInterface::setFrameParameters(
11636 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011637 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011638 int blob_request,
11639 uint32_t snapshotStreamId)
11640{
11641 /*translate from camera_metadata_t type to parm_type_t*/
11642 int rc = 0;
11643 int32_t hal_version = CAM_HAL_V3;
11644
11645 clear_metadata_buffer(mParameters);
11646 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11647 LOGE("Failed to set hal version in the parameters");
11648 return BAD_VALUE;
11649 }
11650
11651 /*we need to update the frame number in the parameters*/
11652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11653 request->frame_number)) {
11654 LOGE("Failed to set the frame number in the parameters");
11655 return BAD_VALUE;
11656 }
11657
11658 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011660 LOGE("Failed to set stream type mask in the parameters");
11661 return BAD_VALUE;
11662 }
11663
11664 if (mUpdateDebugLevel) {
11665 uint32_t dummyDebugLevel = 0;
11666        /* The value of dummyDebugLevel is irrelevant. On
11667         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
11668 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11669 dummyDebugLevel)) {
11670 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11671 return BAD_VALUE;
11672 }
11673 mUpdateDebugLevel = false;
11674 }
11675
11676 if(request->settings != NULL){
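        // Cache the worst-case frame duration implied by these settings for later use.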
Emilian Peev30522a12017-08-03 14:36:33 +010011677 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011678 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11679 if (blob_request)
11680 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11681 }
11682
11683 return rc;
11684}
11685
11686/*===========================================================================
11687 * FUNCTION : setReprocParameters
11688 *
11689 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11690 * return it.
11691 *
11692 * PARAMETERS :
11693 * @request : request that needs to be serviced
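 *   @reprocParam : metadata buffer that receives the translated reprocess parameters
 *   @snapshotStreamId : stream ID of the snapshot stream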
11694 *
11695 * RETURN : success: NO_ERROR
11696 * failure:
11697 *==========================================================================*/
11698int32_t QCamera3HardwareInterface::setReprocParameters(
11699 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11700 uint32_t snapshotStreamId)
11701{
11702 /*translate from camera_metadata_t type to parm_type_t*/
11703 int rc = 0;
11704
11705 if (NULL == request->settings){
11706 LOGE("Reprocess settings cannot be NULL");
11707 return BAD_VALUE;
11708 }
11709
11710 if (NULL == reprocParam) {
11711 LOGE("Invalid reprocessing metadata buffer");
11712 return BAD_VALUE;
11713 }
11714 clear_metadata_buffer(reprocParam);
11715
11716 /*we need to update the frame number in the parameters*/
11717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11718 request->frame_number)) {
11719 LOGE("Failed to set the frame number in the parameters");
11720 return BAD_VALUE;
11721 }
11722
11723 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11724 if (rc < 0) {
11725 LOGE("Failed to translate reproc request");
11726 return rc;
11727 }
11728
11729 CameraMetadata frame_settings;
11730 frame_settings = request->settings;
11731 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11732 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11733 int32_t *crop_count =
11734 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11735 int32_t *crop_data =
11736 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11737 int32_t *roi_map =
11738 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
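        // Only the first crop entry is consumed; reprocess operates on a single stream.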
11739 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11740 cam_crop_data_t crop_meta;
11741 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11742 crop_meta.num_of_streams = 1;
11743 crop_meta.crop_info[0].crop.left = crop_data[0];
11744 crop_meta.crop_info[0].crop.top = crop_data[1];
11745 crop_meta.crop_info[0].crop.width = crop_data[2];
11746 crop_meta.crop_info[0].crop.height = crop_data[3];
11747
11748 crop_meta.crop_info[0].roi_map.left =
11749 roi_map[0];
11750 crop_meta.crop_info[0].roi_map.top =
11751 roi_map[1];
11752 crop_meta.crop_info[0].roi_map.width =
11753 roi_map[2];
11754 crop_meta.crop_info[0].roi_map.height =
11755 roi_map[3];
11756
11757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11758 rc = BAD_VALUE;
11759 }
11760 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11761 request->input_buffer->stream,
11762 crop_meta.crop_info[0].crop.left,
11763 crop_meta.crop_info[0].crop.top,
11764 crop_meta.crop_info[0].crop.width,
11765 crop_meta.crop_info[0].crop.height);
11766 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11767 request->input_buffer->stream,
11768 crop_meta.crop_info[0].roi_map.left,
11769 crop_meta.crop_info[0].roi_map.top,
11770 crop_meta.crop_info[0].roi_map.width,
11771 crop_meta.crop_info[0].roi_map.height);
11772 } else {
11773 LOGE("Invalid reprocess crop count %d!", *crop_count);
11774 }
11775 } else {
11776 LOGE("No crop data from matching output stream");
11777 }
11778
11779    /* These settings are not needed for regular requests, so handle them specially
11780       for reprocess requests; this information is needed for the EXIF tags */
11781 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11782 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11783 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11784 if (NAME_NOT_FOUND != val) {
11785 uint32_t flashMode = (uint32_t)val;
11786 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11787 rc = BAD_VALUE;
11788 }
11789 } else {
11790 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11791 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11792 }
11793 } else {
11794 LOGH("No flash mode in reprocess settings");
11795 }
11796
11797 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11798 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11800 rc = BAD_VALUE;
11801 }
11802 } else {
11803 LOGH("No flash state in reprocess settings");
11804 }
11805
11806 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11807 uint8_t *reprocessFlags =
11808 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11810 *reprocessFlags)) {
11811 rc = BAD_VALUE;
11812 }
11813 }
11814
Thierry Strudel54dc9782017-02-15 12:12:10 -080011815 // Add exif debug data to internal metadata
11816 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11817 mm_jpeg_debug_exif_params_t *debug_params =
11818 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11819 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11820 // AE
11821 if (debug_params->ae_debug_params_valid == TRUE) {
11822 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11823 debug_params->ae_debug_params);
11824 }
11825 // AWB
11826 if (debug_params->awb_debug_params_valid == TRUE) {
11827 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11828 debug_params->awb_debug_params);
11829 }
11830 // AF
11831 if (debug_params->af_debug_params_valid == TRUE) {
11832 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11833 debug_params->af_debug_params);
11834 }
11835 // ASD
11836 if (debug_params->asd_debug_params_valid == TRUE) {
11837 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11838 debug_params->asd_debug_params);
11839 }
11840 // Stats
11841 if (debug_params->stats_debug_params_valid == TRUE) {
11842 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11843 debug_params->stats_debug_params);
11844 }
11845 // BE Stats
11846 if (debug_params->bestats_debug_params_valid == TRUE) {
11847 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11848 debug_params->bestats_debug_params);
11849 }
11850 // BHIST
11851 if (debug_params->bhist_debug_params_valid == TRUE) {
11852 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11853 debug_params->bhist_debug_params);
11854 }
11855 // 3A Tuning
11856 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11857 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11858 debug_params->q3a_tuning_debug_params);
11859 }
11860 }
11861
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011862 // Add metadata which reprocess needs
11863 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11864 cam_reprocess_info_t *repro_info =
11865 (cam_reprocess_info_t *)frame_settings.find
11866 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011867 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011868 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011869 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011870 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011871 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011872 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011873 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011874 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011875 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011876 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011877 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011878 repro_info->pipeline_flip);
11879 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11880 repro_info->af_roi);
11881 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11882 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011883        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11884           CAM_INTF_PARM_ROTATION metadata has already been added in
11885           translateToHalMetadata, and HAL needs to keep this new rotation
11886           metadata. Otherwise, the old rotation info saved in the vendor tag
11887           is used */
11888 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11889 CAM_INTF_PARM_ROTATION, reprocParam) {
11890 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11891 } else {
11892 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011893 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011894 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011895 }
11896
11897    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11898       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11899       roi.width and roi.height are the final JPEG size.
11900       For now, HAL only checks this for reprocess requests */
11901 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11902 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11903 uint8_t *enable =
11904 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11905 if (*enable == TRUE) {
11906 int32_t *crop_data =
11907 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11908 cam_stream_crop_info_t crop_meta;
11909 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11910 crop_meta.stream_id = 0;
11911 crop_meta.crop.left = crop_data[0];
11912 crop_meta.crop.top = crop_data[1];
11913 crop_meta.crop.width = crop_data[2];
11914 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011915 // The JPEG crop roi should match cpp output size
11916 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11917 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11918 crop_meta.roi_map.left = 0;
11919 crop_meta.roi_map.top = 0;
11920 crop_meta.roi_map.width = cpp_crop->crop.width;
11921 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011922 }
11923 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11924 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011925 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011926 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011927 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11928 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011929 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011930 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11931
11932 // Add JPEG scale information
11933 cam_dimension_t scale_dim;
11934 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11935 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11936 int32_t *roi =
11937 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11938 scale_dim.width = roi[2];
11939 scale_dim.height = roi[3];
11940 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11941 scale_dim);
11942 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11943 scale_dim.width, scale_dim.height, mCameraId);
11944 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011945 }
11946 }
11947
11948 return rc;
11949}
11950
11951/*===========================================================================
11952 * FUNCTION : saveRequestSettings
11953 *
11954 * DESCRIPTION: Add any settings that might have changed to the request settings
11955 * and save the settings to be applied on the frame
11956 *
11957 * PARAMETERS :
11958 * @jpegMetadata : the extracted and/or modified jpeg metadata
11959 * @request : request with initial settings
11960 *
11961 * RETURN :
11962 * camera_metadata_t* : pointer to the saved request settings
11963 *==========================================================================*/
11964camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11965 const CameraMetadata &jpegMetadata,
11966 camera3_capture_request_t *request)
11967{
11968 camera_metadata_t *resultMetadata;
11969 CameraMetadata camMetadata;
11970 camMetadata = request->settings;
11971
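    // Overlay values the HAL may have adjusted (e.g. a clamped thumbnail size,
    // the reprocess flag) on top of the app-supplied settings.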
11972 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11973 int32_t thumbnail_size[2];
11974 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11975 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11976 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11977 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11978 }
11979
11980 if (request->input_buffer != NULL) {
11981 uint8_t reprocessFlags = 1;
11982 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11983 (uint8_t*)&reprocessFlags,
11984 sizeof(reprocessFlags));
11985 }
11986
11987 resultMetadata = camMetadata.release();
11988 return resultMetadata;
11989}
11990
11991/*===========================================================================
11992 * FUNCTION : setHalFpsRange
11993 *
11994 * DESCRIPTION: set FPS range parameter
11995 *
11996 *
11997 * PARAMETERS :
11998 * @settings : Metadata from framework
11999 * @hal_metadata: Metadata buffer
12000 *
12001 *
12002 * RETURN : success: NO_ERROR
12003 * failure:
12004 *==========================================================================*/
12005int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12006 metadata_buffer_t *hal_metadata)
12007{
12008 int32_t rc = NO_ERROR;
12009 cam_fps_range_t fps_range;
12010 fps_range.min_fps = (float)
12011 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12012 fps_range.max_fps = (float)
12013 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12014 fps_range.video_min_fps = fps_range.min_fps;
12015 fps_range.video_max_fps = fps_range.max_fps;
12016
12017 LOGD("aeTargetFpsRange fps: [%f %f]",
12018 fps_range.min_fps, fps_range.max_fps);
12019 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12020 * follows:
12021 * ---------------------------------------------------------------|
12022 * Video stream is absent in configure_streams |
12023     *  (Camcorder preview before the first video record)             |
12024 * ---------------------------------------------------------------|
12025 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12026 * | | | vid_min/max_fps|
12027 * ---------------------------------------------------------------|
12028 * NO | [ 30, 240] | 240 | [240, 240] |
12029 * |-------------|-------------|----------------|
12030 * | [240, 240] | 240 | [240, 240] |
12031 * ---------------------------------------------------------------|
12032 * Video stream is present in configure_streams |
12033 * ---------------------------------------------------------------|
12034 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12035 * | | | vid_min/max_fps|
12036 * ---------------------------------------------------------------|
12037 * NO | [ 30, 240] | 240 | [240, 240] |
12038 * (camcorder prev |-------------|-------------|----------------|
12039 * after video rec | [240, 240] | 240 | [240, 240] |
12040 * is stopped) | | | |
12041 * ---------------------------------------------------------------|
12042 * YES | [ 30, 240] | 240 | [240, 240] |
12043 * |-------------|-------------|----------------|
12044 * | [240, 240] | 240 | [240, 240] |
12045 * ---------------------------------------------------------------|
12046 * When Video stream is absent in configure_streams,
12047 * preview fps = sensor_fps / batchsize
12048 * Eg: for 240fps at batchSize 4, preview = 60fps
12049 * for 120fps at batchSize 4, preview = 30fps
12050 *
12051 * When video stream is present in configure_streams, preview fps is as per
12052 * the ratio of preview buffers to video buffers requested in process
12053 * capture request
12054 */
12055 mBatchSize = 0;
12056 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12057 fps_range.min_fps = fps_range.video_max_fps;
12058 fps_range.video_min_fps = fps_range.video_max_fps;
12059 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12060 fps_range.max_fps);
12061 if (NAME_NOT_FOUND != val) {
12062 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12064 return BAD_VALUE;
12065 }
12066
12067 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12068 /* If batchmode is currently in progress and the fps changes,
12069 * set the flag to restart the sensor */
12070 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12071 (mHFRVideoFps != fps_range.max_fps)) {
12072 mNeedSensorRestart = true;
12073 }
12074 mHFRVideoFps = fps_range.max_fps;
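                // Batch size is the HFR video rate relative to the preview rate,
                // capped at MAX_HFR_BATCH_SIZE below.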
12075 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12076 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12077 mBatchSize = MAX_HFR_BATCH_SIZE;
12078 }
12079 }
12080 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12081
12082 }
12083 } else {
12084 /* HFR mode is session param in backend/ISP. This should be reset when
12085 * in non-HFR mode */
12086 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12088 return BAD_VALUE;
12089 }
12090 }
12091 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12092 return BAD_VALUE;
12093 }
12094 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12095 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12096 return rc;
12097}
12098
12099/*===========================================================================
12100 * FUNCTION : translateToHalMetadata
12101 *
12102 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12103 *
12104 *
12105 * PARAMETERS :
12106 * @request : request sent from framework
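 *   @hal_metadata : metadata buffer that receives the translated parameters
 *   @snapshotStreamId : stream ID of the snapshot stream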
12107 *
12108 *
12109 * RETURN : success: NO_ERROR
12110 * failure:
12111 *==========================================================================*/
12112int QCamera3HardwareInterface::translateToHalMetadata
12113 (const camera3_capture_request_t *request,
12114 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012115 uint32_t snapshotStreamId) {
12116 if (request == nullptr || hal_metadata == nullptr) {
12117 return BAD_VALUE;
12118 }
12119
12120 int64_t minFrameDuration = getMinFrameDuration(request);
12121
12122 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12123 minFrameDuration);
12124}
12125
12126int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12127 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12128 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12129
Thierry Strudel3d639192016-09-09 11:52:26 -070012130 int rc = 0;
12131 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012132 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012133
12134 /* Do not change the order of the following list unless you know what you are
12135 * doing.
12136 * The order is laid out in such a way that parameters in the front of the table
12137 * may be used to override the parameters later in the table. Examples are:
12138 * 1. META_MODE should precede AEC/AWB/AF MODE
12139     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12140     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12141     * 4. Any mode should precede its corresponding settings
12142 */
12143 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12144 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12146 rc = BAD_VALUE;
12147 }
12148 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12149 if (rc != NO_ERROR) {
12150 LOGE("extractSceneMode failed");
12151 }
12152 }
12153
12154 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12155 uint8_t fwk_aeMode =
12156 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12157 uint8_t aeMode;
12158 int32_t redeye;
12159
12160 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12161 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012162 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12163 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012164 } else {
12165 aeMode = CAM_AE_MODE_ON;
12166 }
12167 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12168 redeye = 1;
12169 } else {
12170 redeye = 0;
12171 }
12172
12173 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12174 fwk_aeMode);
12175 if (NAME_NOT_FOUND != val) {
12176 int32_t flashMode = (int32_t)val;
12177 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12178 }
12179
12180 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12182 rc = BAD_VALUE;
12183 }
12184 }
12185
12186 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12187 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12188 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12189 fwk_whiteLevel);
12190 if (NAME_NOT_FOUND != val) {
12191 uint8_t whiteLevel = (uint8_t)val;
12192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12193 rc = BAD_VALUE;
12194 }
12195 }
12196 }
12197
12198 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12199 uint8_t fwk_cacMode =
12200 frame_settings.find(
12201 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12202 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12203 fwk_cacMode);
12204 if (NAME_NOT_FOUND != val) {
12205 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12206 bool entryAvailable = FALSE;
12207 // Check whether Frameworks set CAC mode is supported in device or not
12208 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12209 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12210 entryAvailable = TRUE;
12211 break;
12212 }
12213 }
12214 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12215            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
12216            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing the same as Fast by ISP
12217            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12218 if (entryAvailable == FALSE) {
12219 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12220 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12221 } else {
12222 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12223                        // High is not supported, so set FAST since the spec says the underlying
12224                        // device implementation can be the same for both modes.
12225 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12226 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12227 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12228 // in order to avoid the fps drop due to high quality
12229 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12230 } else {
12231 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12232 }
12233 }
12234 }
12235 LOGD("Final cacMode is %d", cacMode);
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12237 rc = BAD_VALUE;
12238 }
12239 } else {
12240 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12241 }
12242 }
12243
Jason Lee84ae9972017-02-24 13:24:24 -080012244 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012245 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012246 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012247 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012248 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12249 fwk_focusMode);
12250 if (NAME_NOT_FOUND != val) {
12251 uint8_t focusMode = (uint8_t)val;
12252 LOGD("set focus mode %d", focusMode);
12253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12254 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12255 rc = BAD_VALUE;
12256 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012257 }
12258 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012259 } else {
12260 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12261 LOGE("Focus forced to infinity %d", focusMode);
12262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12263 rc = BAD_VALUE;
12264 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012265 }
12266
Jason Lee84ae9972017-02-24 13:24:24 -080012267 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12268 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012269 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12271 focalDistance)) {
12272 rc = BAD_VALUE;
12273 }
12274 }
12275
12276 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12277 uint8_t fwk_antibandingMode =
12278 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12279 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12280 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12281 if (NAME_NOT_FOUND != val) {
12282 uint32_t hal_antibandingMode = (uint32_t)val;
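            // AUTO antibanding is specialized to 50 Hz or 60 Hz based on the
            // detected power-line frequency zone (m60HzZone).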
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012283 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12284 if (m60HzZone) {
12285 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12286 } else {
12287 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12288 }
12289 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12291 hal_antibandingMode)) {
12292 rc = BAD_VALUE;
12293 }
12294 }
12295 }
12296
12297 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12298 int32_t expCompensation = frame_settings.find(
12299 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12300 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12301 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12302 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12303 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012304 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12306 expCompensation)) {
12307 rc = BAD_VALUE;
12308 }
12309 }
12310
12311 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12312 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12313 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12314 rc = BAD_VALUE;
12315 }
12316 }
12317 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12318 rc = setHalFpsRange(frame_settings, hal_metadata);
12319 if (rc != NO_ERROR) {
12320 LOGE("setHalFpsRange failed");
12321 }
12322 }
12323
12324 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12325 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330
12331 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12332 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12333 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12334 fwk_effectMode);
12335 if (NAME_NOT_FOUND != val) {
12336 uint8_t effectMode = (uint8_t)val;
12337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12338 rc = BAD_VALUE;
12339 }
12340 }
12341 }
12342
12343 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12344 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12346 colorCorrectMode)) {
12347 rc = BAD_VALUE;
12348 }
12349 }
12350
12351 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12352 cam_color_correct_gains_t colorCorrectGains;
12353 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12354 colorCorrectGains.gains[i] =
12355 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12356 }
12357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12358 colorCorrectGains)) {
12359 rc = BAD_VALUE;
12360 }
12361 }
12362
12363 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12364 cam_color_correct_matrix_t colorCorrectTransform;
12365 cam_rational_type_t transform_elem;
12366 size_t num = 0;
12367 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12368 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12369 transform_elem.numerator =
12370 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12371 transform_elem.denominator =
12372 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12373 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12374 num++;
12375 }
12376 }
12377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12378 colorCorrectTransform)) {
12379 rc = BAD_VALUE;
12380 }
12381 }
12382
12383 cam_trigger_t aecTrigger;
12384 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12385 aecTrigger.trigger_id = -1;
12386 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12387 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12388 aecTrigger.trigger =
12389 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12390 aecTrigger.trigger_id =
12391 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12392 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12393 aecTrigger)) {
12394 rc = BAD_VALUE;
12395 }
12396 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12397 aecTrigger.trigger, aecTrigger.trigger_id);
12398 }
12399
12400 /*af_trigger must come with a trigger id*/
12401 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12402 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12403 cam_trigger_t af_trigger;
12404 af_trigger.trigger =
12405 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12406 af_trigger.trigger_id =
12407 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12408 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12409 rc = BAD_VALUE;
12410 }
12411 LOGD("AfTrigger: %d AfTriggerID: %d",
12412 af_trigger.trigger, af_trigger.trigger_id);
12413 }
12414
12415 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12416 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12418 rc = BAD_VALUE;
12419 }
12420 }
12421 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12422 cam_edge_application_t edge_application;
12423 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012424
Thierry Strudel3d639192016-09-09 11:52:26 -070012425 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12426 edge_application.sharpness = 0;
12427 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012428 edge_application.sharpness =
12429 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12430 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12431 int32_t sharpness =
12432 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12433 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12434 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12435 LOGD("Setting edge mode sharpness %d", sharpness);
12436 edge_application.sharpness = sharpness;
12437 }
12438 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012439 }
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12441 rc = BAD_VALUE;
12442 }
12443 }
12444
12445 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12446 int32_t respectFlashMode = 1;
12447 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12448 uint8_t fwk_aeMode =
12449 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012450 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12451 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12452 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012453 respectFlashMode = 0;
12454 LOGH("AE Mode controls flash, ignore android.flash.mode");
12455 }
12456 }
12457 if (respectFlashMode) {
12458 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12459 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12460 LOGH("flash mode after mapping %d", val);
12461 // To check: CAM_INTF_META_FLASH_MODE usage
12462 if (NAME_NOT_FOUND != val) {
12463 uint8_t flashMode = (uint8_t)val;
12464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12465 rc = BAD_VALUE;
12466 }
12467 }
12468 }
12469 }
12470
12471 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12472 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12479 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12481 flashFiringTime)) {
12482 rc = BAD_VALUE;
12483 }
12484 }
12485
12486 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12487 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12488 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12489 hotPixelMode)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12495 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12496 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12497 lensAperture)) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501
12502 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12503 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12505 filterDensity)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509
12510 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12511 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12512 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12513 focalLength)) {
12514 rc = BAD_VALUE;
12515 }
12516 }
12517
12518 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12519 uint8_t optStabMode =
12520 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12522 optStabMode)) {
12523 rc = BAD_VALUE;
12524 }
12525 }
12526
12527 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12528 uint8_t videoStabMode =
12529 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12530 LOGD("videoStabMode from APP = %d", videoStabMode);
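        // Note: video stabilization mode is written to the member mParameters
        // buffer rather than to hal_metadata.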
12531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12532 videoStabMode)) {
12533 rc = BAD_VALUE;
12534 }
12535 }
12536
12537
12538 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12539 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12541 noiseRedMode)) {
12542 rc = BAD_VALUE;
12543 }
12544 }
12545
12546 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12547 float reprocessEffectiveExposureFactor =
12548 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12549 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12550 reprocessEffectiveExposureFactor)) {
12551 rc = BAD_VALUE;
12552 }
12553 }
12554
12555 cam_crop_region_t scalerCropRegion;
12556 bool scalerCropSet = false;
12557 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12558 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12559 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12560 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12561 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12562
12563 // Map coordinate system from active array to sensor output.
12564 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12565 scalerCropRegion.width, scalerCropRegion.height);
12566
12567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12568 scalerCropRegion)) {
12569 rc = BAD_VALUE;
12570 }
12571 scalerCropSet = true;
12572 }
12573
12574 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12575 int64_t sensorExpTime =
12576 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12577 LOGD("setting sensorExpTime %lld", sensorExpTime);
12578 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12579 sensorExpTime)) {
12580 rc = BAD_VALUE;
12581 }
12582 }
12583
12584 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12585 int64_t sensorFrameDuration =
12586 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012587 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12588 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12589 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12590 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12592 sensorFrameDuration)) {
12593 rc = BAD_VALUE;
12594 }
12595 }
12596
12597 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12598 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12599 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12600 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12601 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12602 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12603 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12604 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12605 sensorSensitivity)) {
12606 rc = BAD_VALUE;
12607 }
12608 }
12609
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012610#ifndef USE_HAL_3_3
12611 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12612 int32_t ispSensitivity =
12613 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12614 if (ispSensitivity <
12615 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12616 ispSensitivity =
12617 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12618 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12619 }
12620 if (ispSensitivity >
12621 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12622 ispSensitivity =
12623 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12624 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12625 }
12626 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12627 ispSensitivity)) {
12628 rc = BAD_VALUE;
12629 }
12630 }
12631#endif
12632
Thierry Strudel3d639192016-09-09 11:52:26 -070012633 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12634 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12636 rc = BAD_VALUE;
12637 }
12638 }
12639
12640 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12641 uint8_t fwk_facedetectMode =
12642 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12643
12644 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12645 fwk_facedetectMode);
12646
12647 if (NAME_NOT_FOUND != val) {
12648 uint8_t facedetectMode = (uint8_t)val;
12649 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12650 facedetectMode)) {
12651 rc = BAD_VALUE;
12652 }
12653 }
12654 }
12655
Thierry Strudel54dc9782017-02-15 12:12:10 -080012656 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012657 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012658 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12660 histogramMode)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664
12665 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12666 uint8_t sharpnessMapMode =
12667 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12668 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12669 sharpnessMapMode)) {
12670 rc = BAD_VALUE;
12671 }
12672 }
12673
12674 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12675 uint8_t tonemapMode =
12676 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12678 rc = BAD_VALUE;
12679 }
12680 }
12681 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12682 /*All tonemap channels will have the same number of points*/
12683 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12684 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12685 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12686 cam_rgb_tonemap_curves tonemapCurves;
12687 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12688 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12689 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12690 tonemapCurves.tonemap_points_cnt,
12691 CAM_MAX_TONEMAP_CURVE_SIZE);
12692 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12693 }
12694
12695 /* ch0 = G*/
12696 size_t point = 0;
12697 cam_tonemap_curve_t tonemapCurveGreen;
12698 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12699 for (size_t j = 0; j < 2; j++) {
12700 tonemapCurveGreen.tonemap_points[i][j] =
12701 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12702 point++;
12703 }
12704 }
12705 tonemapCurves.curves[0] = tonemapCurveGreen;
12706
12707 /* ch 1 = B */
12708 point = 0;
12709 cam_tonemap_curve_t tonemapCurveBlue;
12710 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12711 for (size_t j = 0; j < 2; j++) {
12712 tonemapCurveBlue.tonemap_points[i][j] =
12713 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12714 point++;
12715 }
12716 }
12717 tonemapCurves.curves[1] = tonemapCurveBlue;
12718
12719 /* ch 2 = R */
12720 point = 0;
12721 cam_tonemap_curve_t tonemapCurveRed;
12722 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12723 for (size_t j = 0; j < 2; j++) {
12724 tonemapCurveRed.tonemap_points[i][j] =
12725 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12726 point++;
12727 }
12728 }
12729 tonemapCurves.curves[2] = tonemapCurveRed;
12730
12731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12732 tonemapCurves)) {
12733 rc = BAD_VALUE;
12734 }
12735 }
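
    /*
     * Illustrative note (comment-only sketch): each ANDROID_TONEMAP_CURVE_*
     * entry is a flat float array of (Pin, Pout) pairs in [0, 1], which is
     * why the entry count is halved above. A minimal identity curve that a
     * caller could request per channel is:
     *
     *     float identity[] = { 0.0f, 0.0f, 1.0f, 1.0f };  // two (Pin, Pout) points
     *     settings.update(ANDROID_TONEMAP_CURVE_GREEN, identity, 4);
     *
     * which yields tonemap_points_cnt = 2 here.
     */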
12736
12737 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12738 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12740 captureIntent)) {
12741 rc = BAD_VALUE;
12742 }
12743 }
12744
12745 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12746 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12748 blackLevelLock)) {
12749 rc = BAD_VALUE;
12750 }
12751 }
12752
12753 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12754 uint8_t lensShadingMapMode =
12755 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12757 lensShadingMapMode)) {
12758 rc = BAD_VALUE;
12759 }
12760 }
12761
12762 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12763 cam_area_t roi;
12764 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012765 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012766
12767 // Map coordinate system from active array to sensor output.
12768 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12769 roi.rect.height);
12770
12771 if (scalerCropSet) {
12772 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12773 }
12774 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12775 rc = BAD_VALUE;
12776 }
12777 }
12778
12779 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12780 cam_area_t roi;
12781 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012782 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012783
12784 // Map coordinate system from active array to sensor output.
12785 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12786 roi.rect.height);
12787
12788 if (scalerCropSet) {
12789 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12790 }
12791 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795
12796 // CDS for non-HFR non-video mode
12797 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12798 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12799 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12800 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12801 LOGE("Invalid CDS mode %d!", *fwk_cds);
12802 } else {
12803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12804 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12805 rc = BAD_VALUE;
12806 }
12807 }
12808 }
12809
Thierry Strudel04e026f2016-10-10 11:27:36 -070012810 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012811 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012812 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012813 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12814 }
12815 if (m_bVideoHdrEnabled)
12816 vhdr = CAM_VIDEO_HDR_MODE_ON;
12817
Thierry Strudel54dc9782017-02-15 12:12:10 -080012818 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12819
12820    if (vhdr != curr_hdr_state)
12821        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12822
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012823 rc = setVideoHdrMode(mParameters, vhdr);
12824 if (rc != NO_ERROR) {
12825 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012826 }
12827
12828 //IR
12829 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12830 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12831 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012832 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12833 uint8_t isIRon = 0;
12834
12835        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012836 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12837 LOGE("Invalid IR mode %d!", fwk_ir);
12838 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012839 if(isIRon != curr_ir_state )
12840 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12841
Thierry Strudel04e026f2016-10-10 11:27:36 -070012842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12843 CAM_INTF_META_IR_MODE, fwk_ir)) {
12844 rc = BAD_VALUE;
12845 }
12846 }
12847 }
12848
Thierry Strudel54dc9782017-02-15 12:12:10 -080012849 //Binning Correction Mode
12850 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12851 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12852 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12853 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12854 || (0 > fwk_binning_correction)) {
12855 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12856 } else {
12857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12858 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12859 rc = BAD_VALUE;
12860 }
12861 }
12862 }
12863
Thierry Strudel269c81a2016-10-12 12:13:59 -070012864 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12865 float aec_speed;
12866 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12867 LOGD("AEC Speed :%f", aec_speed);
12868 if ( aec_speed < 0 ) {
12869 LOGE("Invalid AEC mode %f!", aec_speed);
12870 } else {
12871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12872 aec_speed)) {
12873 rc = BAD_VALUE;
12874 }
12875 }
12876 }
12877
12878 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12879 float awb_speed;
12880 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12881 LOGD("AWB Speed :%f", awb_speed);
12882 if ( awb_speed < 0 ) {
12883 LOGE("Invalid AWB mode %f!", awb_speed);
12884 } else {
12885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12886 awb_speed)) {
12887 rc = BAD_VALUE;
12888 }
12889 }
12890 }
12891
Thierry Strudel3d639192016-09-09 11:52:26 -070012892 // TNR
12893 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12894 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12895 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012896 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012897 cam_denoise_param_t tnr;
12898 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12899 tnr.process_plates =
12900 (cam_denoise_process_type_t)frame_settings.find(
12901 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12902 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012903
12904        if (b_TnrRequested != curr_tnr_state)
12905            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12906
Thierry Strudel3d639192016-09-09 11:52:26 -070012907 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12908 rc = BAD_VALUE;
12909 }
12910 }
12911
Thierry Strudel54dc9782017-02-15 12:12:10 -080012912 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012913 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012914 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12916 *exposure_metering_mode)) {
12917 rc = BAD_VALUE;
12918 }
12919 }
12920
Thierry Strudel3d639192016-09-09 11:52:26 -070012921 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12922 int32_t fwk_testPatternMode =
12923 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12924 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12925 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12926
12927 if (NAME_NOT_FOUND != testPatternMode) {
12928 cam_test_pattern_data_t testPatternData;
12929 memset(&testPatternData, 0, sizeof(testPatternData));
12930 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12931 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12932 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12933 int32_t *fwk_testPatternData =
12934 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12935 testPatternData.r = fwk_testPatternData[0];
12936 testPatternData.b = fwk_testPatternData[3];
12937 switch (gCamCapability[mCameraId]->color_arrangement) {
12938 case CAM_FILTER_ARRANGEMENT_RGGB:
12939 case CAM_FILTER_ARRANGEMENT_GRBG:
12940 testPatternData.gr = fwk_testPatternData[1];
12941 testPatternData.gb = fwk_testPatternData[2];
12942 break;
12943 case CAM_FILTER_ARRANGEMENT_GBRG:
12944 case CAM_FILTER_ARRANGEMENT_BGGR:
12945 testPatternData.gr = fwk_testPatternData[2];
12946 testPatternData.gb = fwk_testPatternData[1];
12947 break;
12948 default:
12949 LOGE("color arrangement %d is not supported",
12950 gCamCapability[mCameraId]->color_arrangement);
12951 break;
12952 }
12953 }
12954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12955 testPatternData)) {
12956 rc = BAD_VALUE;
12957 }
12958 } else {
12959 LOGE("Invalid framework sensor test pattern mode %d",
12960 fwk_testPatternMode);
12961 }
12962 }
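
    /*
     * Note: ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, Geven, Godd, B],
     * which is why the two green samples are swapped above depending on
     * whether the sensor CFA row order starts with Gr or Gb.
     */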
12963
12964 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12965 size_t count = 0;
12966 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12967 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12968 gps_coords.data.d, gps_coords.count, count);
12969 if (gps_coords.count != count) {
12970 rc = BAD_VALUE;
12971 }
12972 }
12973
12974 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12975 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12976 size_t count = 0;
12977 const char *gps_methods_src = (const char *)
12978 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12979 memset(gps_methods, '\0', sizeof(gps_methods));
12980 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12981 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12982 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12983 if (GPS_PROCESSING_METHOD_SIZE != count) {
12984 rc = BAD_VALUE;
12985 }
12986 }
12987
12988 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12989 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12991 gps_timestamp)) {
12992 rc = BAD_VALUE;
12993 }
12994 }
12995
12996 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12997 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12998 cam_rotation_info_t rotation_info;
12999 if (orientation == 0) {
13000 rotation_info.rotation = ROTATE_0;
13001 } else if (orientation == 90) {
13002 rotation_info.rotation = ROTATE_90;
13003 } else if (orientation == 180) {
13004 rotation_info.rotation = ROTATE_180;
13005 } else if (orientation == 270) {
13006 rotation_info.rotation = ROTATE_270;
13007 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013008 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013009 rotation_info.streamId = snapshotStreamId;
13010 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13012 rc = BAD_VALUE;
13013 }
13014 }
13015
13016 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13017 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13019 rc = BAD_VALUE;
13020 }
13021 }
13022
13023 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13024 uint32_t thumb_quality = (uint32_t)
13025 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13026 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13027 thumb_quality)) {
13028 rc = BAD_VALUE;
13029 }
13030 }
13031
13032 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13033 cam_dimension_t dim;
13034 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13035 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13037 rc = BAD_VALUE;
13038 }
13039 }
13040
13041 // Internal metadata
13042 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13043 size_t count = 0;
13044 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13045 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13046 privatedata.data.i32, privatedata.count, count);
13047 if (privatedata.count != count) {
13048 rc = BAD_VALUE;
13049 }
13050 }
13051
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013052 // ISO/Exposure Priority
13053 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13054 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13055 cam_priority_mode_t mode =
13056 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13057 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13058 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13059 use_iso_exp_pty.previewOnly = FALSE;
13060 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13061 use_iso_exp_pty.value = *ptr;
13062
13063 if(CAM_ISO_PRIORITY == mode) {
13064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13065 use_iso_exp_pty)) {
13066 rc = BAD_VALUE;
13067 }
13068 }
13069 else {
13070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13071 use_iso_exp_pty)) {
13072 rc = BAD_VALUE;
13073 }
13074 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013075
13076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13077 rc = BAD_VALUE;
13078 }
13079 }
13080 } else {
13081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13082 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013083 }
13084 }
13085
13086 // Saturation
13087 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13088 int32_t* use_saturation =
13089 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13091 rc = BAD_VALUE;
13092 }
13093 }
13094
Thierry Strudel3d639192016-09-09 11:52:26 -070013095 // EV step
13096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13097 gCamCapability[mCameraId]->exp_compensation_step)) {
13098 rc = BAD_VALUE;
13099 }
13100
13101 // CDS info
13102 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13103 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13104 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13105
13106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13107 CAM_INTF_META_CDS_DATA, *cdsData)) {
13108 rc = BAD_VALUE;
13109 }
13110 }
13111
Shuzhen Wang19463d72016-03-08 11:09:52 -080013112 // Hybrid AE
13113 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13114 uint8_t *hybrid_ae = (uint8_t *)
13115 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13117 rc = BAD_VALUE;
13118 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013119 }
13120
Shuzhen Wang14415f52016-11-16 18:26:18 -080013121 // Histogram
13122 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13123 uint8_t histogramMode =
13124 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13125 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13126 histogramMode)) {
13127 rc = BAD_VALUE;
13128 }
13129 }
13130
13131 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13132 int32_t histogramBins =
13133 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13135 histogramBins)) {
13136 rc = BAD_VALUE;
13137 }
13138 }
13139
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013140 // Tracking AF
13141 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13142 uint8_t trackingAfTrigger =
13143 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13144 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13145 trackingAfTrigger)) {
13146 rc = BAD_VALUE;
13147 }
13148 }
13149
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013150 // Makernote
13151 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13152 if (entry.count != 0) {
13153 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13154 cam_makernote_t makernote;
13155 makernote.length = entry.count;
13156 memcpy(makernote.data, entry.data.u8, makernote.length);
13157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13158 rc = BAD_VALUE;
13159 }
13160 } else {
13161 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13162 MAX_MAKERNOTE_LENGTH);
13163 rc = BAD_VALUE;
13164 }
13165 }
13166
Thierry Strudel3d639192016-09-09 11:52:26 -070013167 return rc;
13168}
13169
13170/*===========================================================================
13171 * FUNCTION : captureResultCb
13172 *
13173 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13174 *
13175 * PARAMETERS :
13176 * @frame : frame information from mm-camera-interface
13177 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13178 * @userdata: userdata
13179 *
13180 * RETURN : NONE
13181 *==========================================================================*/
13182void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13183 camera3_stream_buffer_t *buffer,
13184 uint32_t frame_number, bool isInputBuffer, void *userdata)
13185{
13186 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13187 if (hw == NULL) {
13188 LOGE("Invalid hw %p", hw);
13189 return;
13190 }
13191
13192 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13193 return;
13194}
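
/*
 * Note: the static callbacks in this file (captureResultCb above,
 * setBufferErrorStatus below, and the camera3_device ops that follow) are
 * thin trampolines: they recover the QCamera3HardwareInterface instance from
 * the opaque userdata / device->priv pointer and forward to the member
 * function of the same name.
 */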
13195
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013196/*===========================================================================
13197 * FUNCTION : setBufferErrorStatus
13198 *
13199 * DESCRIPTION: Callback handler for channels to report any buffer errors
13200 *
13201 * PARAMETERS :
13202 * @ch : Channel from which the buffer error is reported
13203 * @frame_number : frame number on which buffer error is reported on
13204 * @buffer_status : buffer error status
13205 * @userdata: userdata
13206 *
13207 * RETURN : NONE
13208 *==========================================================================*/
13209void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13210 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13211{
13212 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13213 if (hw == NULL) {
13214 LOGE("Invalid hw %p", hw);
13215 return;
13216 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013217
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013218 hw->setBufferErrorStatus(ch, frame_number, err);
13219 return;
13220}
13221
13222void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13223 uint32_t frameNumber, camera3_buffer_status_t err)
13224{
13225 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13226 pthread_mutex_lock(&mMutex);
13227
13228 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13229 if (req.frame_number != frameNumber)
13230 continue;
13231 for (auto& k : req.mPendingBufferList) {
13232 if(k.stream->priv == ch) {
13233 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13234 }
13235 }
13236 }
13237
13238 pthread_mutex_unlock(&mMutex);
13239 return;
13240}
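
/*
 * Note: setBufferErrorStatus() only marks the matching pending buffers as
 * CAMERA3_BUFFER_STATUS_ERROR under mMutex; the error status is surfaced to
 * the framework later, when the buffers for that frame number are returned.
 */
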
Thierry Strudel3d639192016-09-09 11:52:26 -070013241/*===========================================================================
13242 * FUNCTION : initialize
13243 *
13244 * DESCRIPTION: Pass framework callback pointers to HAL
13245 *
13246 * PARAMETERS :
13247 *
13248 *
13249 * RETURN : Success : 0
13250 * Failure: -ENODEV
13251 *==========================================================================*/
13252
13253int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13254 const camera3_callback_ops_t *callback_ops)
13255{
13256 LOGD("E");
13257 QCamera3HardwareInterface *hw =
13258 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13259 if (!hw) {
13260 LOGE("NULL camera device");
13261 return -ENODEV;
13262 }
13263
13264 int rc = hw->initialize(callback_ops);
13265 LOGD("X");
13266 return rc;
13267}
13268
13269/*===========================================================================
13270 * FUNCTION : configure_streams
13271 *
13272 * DESCRIPTION: Entry point for the framework to configure a new set of input/output streams
13273 *
13274 * PARAMETERS :
13275 *
13276 *
13277 * RETURN : Success: 0
13278 * Failure: -EINVAL (if stream configuration is invalid)
13279 * -ENODEV (fatal error)
13280 *==========================================================================*/
13281
13282int QCamera3HardwareInterface::configure_streams(
13283 const struct camera3_device *device,
13284 camera3_stream_configuration_t *stream_list)
13285{
13286 LOGD("E");
13287 QCamera3HardwareInterface *hw =
13288 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13289 if (!hw) {
13290 LOGE("NULL camera device");
13291 return -ENODEV;
13292 }
13293 int rc = hw->configureStreams(stream_list);
13294 LOGD("X");
13295 return rc;
13296}
13297
13298/*===========================================================================
13299 * FUNCTION : construct_default_request_settings
13300 *
13301 * DESCRIPTION: Configure a settings buffer to meet the required use case
13302 *
13303 * PARAMETERS :
13304 *
13305 *
13306 * RETURN : Success: Return valid metadata
13307 * Failure: Return NULL
13308 *==========================================================================*/
13309const camera_metadata_t* QCamera3HardwareInterface::
13310 construct_default_request_settings(const struct camera3_device *device,
13311 int type)
13312{
13313
13314 LOGD("E");
13315 camera_metadata_t* fwk_metadata = NULL;
13316 QCamera3HardwareInterface *hw =
13317 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13318 if (!hw) {
13319 LOGE("NULL camera device");
13320 return NULL;
13321 }
13322
13323 fwk_metadata = hw->translateCapabilityToMetadata(type);
13324
13325 LOGD("X");
13326 return fwk_metadata;
13327}
13328
13329/*===========================================================================
13330 * FUNCTION : process_capture_request
13331 *
13332 * DESCRIPTION: Entry point for the framework to submit a capture request to the HAL
13333 *
13334 * PARAMETERS :
13335 *
13336 *
13337 * RETURN :
13338 *==========================================================================*/
13339int QCamera3HardwareInterface::process_capture_request(
13340 const struct camera3_device *device,
13341 camera3_capture_request_t *request)
13342{
13343 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013344 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013345 QCamera3HardwareInterface *hw =
13346 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13347 if (!hw) {
13348 LOGE("NULL camera device");
13349 return -EINVAL;
13350 }
13351
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013352 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013353 LOGD("X");
13354 return rc;
13355}
13356
13357/*===========================================================================
13358 * FUNCTION : dump
13359 *
13360 * DESCRIPTION: Dump HAL debug state to the given file descriptor; also re-reads log level properties
13361 *
13362 * PARAMETERS :
13363 *
13364 *
13365 * RETURN :
13366 *==========================================================================*/
13367
13368void QCamera3HardwareInterface::dump(
13369 const struct camera3_device *device, int fd)
13370{
13371 /* Log level property is read when "adb shell dumpsys media.camera" is
13372 called so that the log level can be controlled without restarting
13373 the media server */
13374 getLogLevel();
13375
13376 LOGD("E");
13377 QCamera3HardwareInterface *hw =
13378 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13379 if (!hw) {
13380 LOGE("NULL camera device");
13381 return;
13382 }
13383
13384 hw->dump(fd);
13385 LOGD("X");
13386 return;
13387}
13388
13389/*===========================================================================
13390 * FUNCTION : flush
13391 *
13392 * DESCRIPTION: Flush all in-flight captures and return the device to an idle, configured state
13393 *
13394 * PARAMETERS :
13395 *
13396 *
13397 * RETURN :
13398 *==========================================================================*/
13399
13400int QCamera3HardwareInterface::flush(
13401 const struct camera3_device *device)
13402{
13403 int rc;
13404 LOGD("E");
13405 QCamera3HardwareInterface *hw =
13406 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13407 if (!hw) {
13408 LOGE("NULL camera device");
13409 return -EINVAL;
13410 }
13411
13412 pthread_mutex_lock(&hw->mMutex);
13413 // Validate current state
13414 switch (hw->mState) {
13415 case STARTED:
13416 /* valid state */
13417 break;
13418
13419 case ERROR:
13420 pthread_mutex_unlock(&hw->mMutex);
13421 hw->handleCameraDeviceError();
13422 return -ENODEV;
13423
13424 default:
13425 LOGI("Flush returned during state %d", hw->mState);
13426 pthread_mutex_unlock(&hw->mMutex);
13427 return 0;
13428 }
13429 pthread_mutex_unlock(&hw->mMutex);
13430
13431 rc = hw->flush(true /* restart channels */ );
13432 LOGD("X");
13433 return rc;
13434}
13435
13436/*===========================================================================
13437 * FUNCTION : close_camera_device
13438 *
13439 * DESCRIPTION: Close the camera device and release the HAL instance
13440 *
13441 * PARAMETERS :
13442 *
13443 *
13444 * RETURN :
13445 *==========================================================================*/
13446int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13447{
13448 int ret = NO_ERROR;
13449 QCamera3HardwareInterface *hw =
13450 reinterpret_cast<QCamera3HardwareInterface *>(
13451 reinterpret_cast<camera3_device_t *>(device)->priv);
13452 if (!hw) {
13453 LOGE("NULL camera device");
13454 return BAD_VALUE;
13455 }
13456
13457 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13458 delete hw;
13459 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013460 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013461 return ret;
13462}
13463
13464/*===========================================================================
13465 * FUNCTION : getWaveletDenoiseProcessPlate
13466 *
13467 * DESCRIPTION: query wavelet denoise process plate
13468 *
13469 * PARAMETERS : None
13470 *
13471 * RETURN : WNR prcocess plate value
13472 *==========================================================================*/
13473cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13474{
13475 char prop[PROPERTY_VALUE_MAX];
13476 memset(prop, 0, sizeof(prop));
13477 property_get("persist.denoise.process.plates", prop, "0");
13478 int processPlate = atoi(prop);
13479 switch(processPlate) {
13480 case 0:
13481 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13482 case 1:
13483 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13484 case 2:
13485 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13486 case 3:
13487 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13488 default:
13489 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13490 }
13491}
13492
13493
13494/*===========================================================================
13495 * FUNCTION : getTemporalDenoiseProcessPlate
13496 *
13497 * DESCRIPTION: query temporal denoise process plate
13498 *
13499 * PARAMETERS : None
13500 *
13501 * RETURN : TNR prcocess plate value
13502 *==========================================================================*/
13503cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13504{
13505 char prop[PROPERTY_VALUE_MAX];
13506 memset(prop, 0, sizeof(prop));
13507 property_get("persist.tnr.process.plates", prop, "0");
13508 int processPlate = atoi(prop);
13509 switch(processPlate) {
13510 case 0:
13511 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13512 case 1:
13513 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13514 case 2:
13515 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13516 case 3:
13517 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13518 default:
13519 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13520 }
13521}
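
/*
 * Usage note (illustrative): both helpers above map an integer system
 * property directly onto cam_denoise_process_type_t, e.g.
 *
 *     adb shell setprop persist.denoise.process.plates 1   # WNR: CbCr only
 *     adb shell setprop persist.tnr.process.plates 2       # TNR: streamline YCbCr
 *
 * Unrecognized values fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
 */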
13522
13523
13524/*===========================================================================
13525 * FUNCTION : extractSceneMode
13526 *
13527 * DESCRIPTION: Extract scene mode from frameworks set metadata
13528 *
13529 * PARAMETERS :
13530 * @frame_settings: CameraMetadata reference
13531 * @metaMode: ANDROID_CONTROL_MODE
13532 * @hal_metadata: hal metadata structure
13533 *
13534 * RETURN     : int32_t status: NO_ERROR on success, BAD_VALUE on failure
13535 *==========================================================================*/
13536int32_t QCamera3HardwareInterface::extractSceneMode(
13537 const CameraMetadata &frame_settings, uint8_t metaMode,
13538 metadata_buffer_t *hal_metadata)
13539{
13540 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013541 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13542
13543 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13544 LOGD("Ignoring control mode OFF_KEEP_STATE");
13545 return NO_ERROR;
13546 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013547
13548 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13549 camera_metadata_ro_entry entry =
13550 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13551 if (0 == entry.count)
13552 return rc;
13553
13554 uint8_t fwk_sceneMode = entry.data.u8[0];
13555
13556 int val = lookupHalName(SCENE_MODES_MAP,
13557 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13558 fwk_sceneMode);
13559 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013560 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013561 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013562 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013563 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013564
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013565 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13566 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13567 }
13568
13569 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13570 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013571 cam_hdr_param_t hdr_params;
13572 hdr_params.hdr_enable = 1;
13573 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13574 hdr_params.hdr_need_1x = false;
13575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13576 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13577 rc = BAD_VALUE;
13578 }
13579 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013580
Thierry Strudel3d639192016-09-09 11:52:26 -070013581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13582 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13583 rc = BAD_VALUE;
13584 }
13585 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013586
13587 if (mForceHdrSnapshot) {
13588 cam_hdr_param_t hdr_params;
13589 hdr_params.hdr_enable = 1;
13590 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13591 hdr_params.hdr_need_1x = false;
13592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13593 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13594 rc = BAD_VALUE;
13595 }
13596 }
13597
Thierry Strudel3d639192016-09-09 11:52:26 -070013598 return rc;
13599}
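
/*
 * Illustrative sketch (comment only, not part of the HAL build): framework
 * settings that exercise the scene-mode path above, using the same
 * CameraMetadata helper and ANDROID_CONTROL_* tags referenced in this file.
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *
 * Passing these settings through extractSceneMode() takes the HDR branches
 * handled above (sensor HDR and/or HAL bracketing).
 */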
13600
13601/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013602 * FUNCTION : setVideoHdrMode
13603 *
13604 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13605 *
13606 * PARAMETERS :
13607 * @hal_metadata: hal metadata structure
13608 * @vhdr        : video HDR mode to apply (QCAMERA3_VIDEO_HDR_MODE value)
13609 *
13610 * RETURN     : int32_t status: NO_ERROR on success, BAD_VALUE on failure
13611 *==========================================================================*/
13612int32_t QCamera3HardwareInterface::setVideoHdrMode(
13613 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13614{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013615 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13616 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13617 }
13618
13619 LOGE("Invalid Video HDR mode %d!", vhdr);
13620 return BAD_VALUE;
13621}
13622
13623/*===========================================================================
13624 * FUNCTION : setSensorHDR
13625 *
13626 * DESCRIPTION: Enable/disable sensor HDR.
13627 *
13628 * PARAMETERS :
13629 * @hal_metadata: hal metadata structure
13630 * @enable: boolean whether to enable/disable sensor HDR
13631 *
13632 * RETURN     : int32_t status: NO_ERROR on success, BAD_VALUE on failure
13633 *==========================================================================*/
13634int32_t QCamera3HardwareInterface::setSensorHDR(
13635 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13636{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013637 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013638 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13639
13640 if (enable) {
13641 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13642 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13643 #ifdef _LE_CAMERA_
13644 //Default to staggered HDR for IOT
13645 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13646 #else
13647 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13648 #endif
13649 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13650 }
13651
13652 bool isSupported = false;
13653 switch (sensor_hdr) {
13654 case CAM_SENSOR_HDR_IN_SENSOR:
13655 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13656 CAM_QCOM_FEATURE_SENSOR_HDR) {
13657 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013658 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013659 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013660 break;
13661 case CAM_SENSOR_HDR_ZIGZAG:
13662 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13663 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13664 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013665 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013666 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013667 break;
13668 case CAM_SENSOR_HDR_STAGGERED:
13669 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13670 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13671 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013672 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013673 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013674 break;
13675 case CAM_SENSOR_HDR_OFF:
13676 isSupported = true;
13677 LOGD("Turning off sensor HDR");
13678 break;
13679 default:
13680 LOGE("HDR mode %d not supported", sensor_hdr);
13681 rc = BAD_VALUE;
13682 break;
13683 }
13684
13685 if(isSupported) {
13686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13687 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13688 rc = BAD_VALUE;
13689 } else {
13690 if(!isVideoHdrEnable)
13691 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013692 }
13693 }
13694 return rc;
13695}
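
/*
 * Usage note (illustrative): when HDR is requested, the sensor HDR flavor is
 * selected via the persist.camera.sensor.hdr property read above, e.g.
 *
 *     adb shell setprop persist.camera.sensor.hdr 3   # staggered HDR (the IOT default)
 *
 * and is applied only if the corresponding CAM_QCOM_FEATURE_* bit is present
 * in the camera capability mask.
 */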
13696
13697/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013698 * FUNCTION : needRotationReprocess
13699 *
13700 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13701 *
13702 * PARAMETERS : none
13703 *
13704 * RETURN : true: needed
13705 * false: no need
13706 *==========================================================================*/
13707bool QCamera3HardwareInterface::needRotationReprocess()
13708{
13709 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13710 // current rotation is not zero, and pp has the capability to process rotation
13711 LOGH("need do reprocess for rotation");
13712 return true;
13713 }
13714
13715 return false;
13716}
13717
13718/*===========================================================================
13719 * FUNCTION : needReprocess
13720 *
13721 * DESCRIPTION: if reprocess is needed
13722 *
13723 * PARAMETERS : none
13724 *
13725 * RETURN : true: needed
13726 * false: no need
13727 *==========================================================================*/
13728bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13729{
13730 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13731 // TODO: add for ZSL HDR later
13732 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13733 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13734 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13735 return true;
13736 } else {
13737 LOGH("already post processed frame");
13738 return false;
13739 }
13740 }
13741 return needRotationReprocess();
13742}
13743
13744/*===========================================================================
13745 * FUNCTION : needJpegExifRotation
13746 *
13747 * DESCRIPTION: if rotation from jpeg is needed
13748 *
13749 * PARAMETERS : none
13750 *
13751 * RETURN : true: needed
13752 * false: no need
13753 *==========================================================================*/
13754bool QCamera3HardwareInterface::needJpegExifRotation()
13755{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013756 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013757 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13758 LOGD("Need use Jpeg EXIF Rotation");
13759 return true;
13760 }
13761 return false;
13762}
13763
13764/*===========================================================================
13765 * FUNCTION : addOfflineReprocChannel
13766 *
13767 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13768 * coming from input channel
13769 *
13770 * PARAMETERS :
13771 * @config : reprocess configuration
13772 * @inputChHandle : pointer to the input (source) channel
13773 *
13774 *
13775 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13776 *==========================================================================*/
13777QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13778 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13779{
13780 int32_t rc = NO_ERROR;
13781 QCamera3ReprocessChannel *pChannel = NULL;
13782
13783 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013784 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13785 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013786 if (NULL == pChannel) {
13787 LOGE("no mem for reprocess channel");
13788 return NULL;
13789 }
13790
13791 rc = pChannel->initialize(IS_TYPE_NONE);
13792 if (rc != NO_ERROR) {
13793 LOGE("init reprocess channel failed, ret = %d", rc);
13794 delete pChannel;
13795 return NULL;
13796 }
13797
13798 // pp feature config
13799 cam_pp_feature_config_t pp_config;
13800 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13801
13802 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13803 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13804 & CAM_QCOM_FEATURE_DSDN) {
13805        // Use CPP CDS in case h/w supports it.
13806 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13807 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13808 }
13809 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13810 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13811 }
13812
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013813 if (config.hdr_param.hdr_enable) {
13814 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13815 pp_config.hdr_param = config.hdr_param;
13816 }
13817
13818 if (mForceHdrSnapshot) {
13819 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13820 pp_config.hdr_param.hdr_enable = 1;
13821 pp_config.hdr_param.hdr_need_1x = 0;
13822 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13823 }
13824
Thierry Strudel3d639192016-09-09 11:52:26 -070013825 rc = pChannel->addReprocStreamsFromSource(pp_config,
13826 config,
13827 IS_TYPE_NONE,
13828 mMetadataChannel);
13829
13830 if (rc != NO_ERROR) {
13831 delete pChannel;
13832 return NULL;
13833 }
13834 return pChannel;
13835}
13836
13837/*===========================================================================
13838 * FUNCTION : getMobicatMask
13839 *
13840 * DESCRIPTION: returns mobicat mask
13841 *
13842 * PARAMETERS : none
13843 *
13844 * RETURN : mobicat mask
13845 *
13846 *==========================================================================*/
13847uint8_t QCamera3HardwareInterface::getMobicatMask()
13848{
13849 return m_MobicatMask;
13850}
13851
13852/*===========================================================================
13853 * FUNCTION : setMobicat
13854 *
13855 * DESCRIPTION: set Mobicat on/off.
13856 *
13857 * PARAMETERS :
13858 * @params : none
13859 *
13860 * RETURN : int32_t type of status
13861 * NO_ERROR -- success
13862 *              non-zero failure code
13863 *==========================================================================*/
13864int32_t QCamera3HardwareInterface::setMobicat()
13865{
Thierry Strudel3d639192016-09-09 11:52:26 -070013866 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013867
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013868 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013869 tune_cmd_t tune_cmd;
13870 tune_cmd.type = SET_RELOAD_CHROMATIX;
13871 tune_cmd.module = MODULE_ALL;
13872 tune_cmd.value = TRUE;
13873 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13874 CAM_INTF_PARM_SET_VFE_COMMAND,
13875 tune_cmd);
13876
13877 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13878 CAM_INTF_PARM_SET_PP_COMMAND,
13879 tune_cmd);
13880 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013881
13882 return ret;
13883}
13884
13885/*===========================================================================
13886* FUNCTION : getLogLevel
13887*
13888* DESCRIPTION: Reads the log level property into a variable
13889*
13890* PARAMETERS :
13891* None
13892*
13893* RETURN :
13894* None
13895*==========================================================================*/
13896void QCamera3HardwareInterface::getLogLevel()
13897{
13898 char prop[PROPERTY_VALUE_MAX];
13899 uint32_t globalLogLevel = 0;
13900
13901 property_get("persist.camera.hal.debug", prop, "0");
13902 int val = atoi(prop);
13903 if (0 <= val) {
13904 gCamHal3LogLevel = (uint32_t)val;
13905 }
13906
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013907 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013908 gKpiDebugLevel = atoi(prop);
13909
13910 property_get("persist.camera.global.debug", prop, "0");
13911 val = atoi(prop);
13912 if (0 <= val) {
13913 globalLogLevel = (uint32_t)val;
13914 }
13915
13916 /* Highest log level among hal.logs and global.logs is selected */
13917 if (gCamHal3LogLevel < globalLogLevel)
13918 gCamHal3LogLevel = globalLogLevel;
13919
13920 return;
13921}
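
/*
 * Usage note (illustrative): HAL verbosity can be raised at runtime, e.g.
 *
 *     adb shell setprop persist.camera.hal.debug 4
 *     adb shell setprop persist.camera.global.debug 4
 *     adb shell dumpsys media.camera      # dump() re-reads these properties
 *
 * The effective level is the higher of the per-HAL and global values.
 */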
13922
13923/*===========================================================================
13924 * FUNCTION : validateStreamRotations
13925 *
13926 * DESCRIPTION: Check if the rotations requested are supported
13927 *
13928 * PARAMETERS :
13929 * @stream_list : streams to be configured
13930 *
13931 * RETURN : NO_ERROR on success
13932 * -EINVAL on failure
13933 *
13934 *==========================================================================*/
13935int QCamera3HardwareInterface::validateStreamRotations(
13936 camera3_stream_configuration_t *streamList)
13937{
13938 int rc = NO_ERROR;
13939
13940 /*
13941 * Loop through all streams requested in configuration
13942 * Check if unsupported rotations have been requested on any of them
13943 */
13944 for (size_t j = 0; j < streamList->num_streams; j++){
13945 camera3_stream_t *newStream = streamList->streams[j];
13946
Emilian Peev35ceeed2017-06-29 11:58:56 -070013947 switch(newStream->rotation) {
13948 case CAMERA3_STREAM_ROTATION_0:
13949 case CAMERA3_STREAM_ROTATION_90:
13950 case CAMERA3_STREAM_ROTATION_180:
13951 case CAMERA3_STREAM_ROTATION_270:
13952 //Expected values
13953 break;
13954 default:
13955 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13956 "type:%d and stream format:%d", __func__,
13957 newStream->rotation, newStream->stream_type,
13958 newStream->format);
13959 return -EINVAL;
13960 }
13961
Thierry Strudel3d639192016-09-09 11:52:26 -070013962 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13963 bool isImplDef = (newStream->format ==
13964 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13965 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13966 isImplDef);
13967
13968 if (isRotated && (!isImplDef || isZsl)) {
13969 LOGE("Error: Unsupported rotation of %d requested for stream"
13970 "type:%d and stream format:%d",
13971 newStream->rotation, newStream->stream_type,
13972 newStream->format);
13973 rc = -EINVAL;
13974 break;
13975 }
13976 }
13977
13978 return rc;
13979}
13980
13981/*===========================================================================
13982* FUNCTION : getFlashInfo
13983*
13984* DESCRIPTION: Retrieve information about whether the device has a flash.
13985*
13986* PARAMETERS :
13987* @cameraId : Camera id to query
13988* @hasFlash : Boolean indicating whether there is a flash device
13989* associated with given camera
13990* @flashNode : If a flash device exists, this will be its device node.
13991*
13992* RETURN :
13993* None
13994*==========================================================================*/
13995void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13996 bool& hasFlash,
13997 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13998{
13999 cam_capability_t* camCapability = gCamCapability[cameraId];
14000 if (NULL == camCapability) {
14001 hasFlash = false;
14002 flashNode[0] = '\0';
14003 } else {
14004 hasFlash = camCapability->flash_available;
14005 strlcpy(flashNode,
14006 (char*)camCapability->flash_dev_name,
14007 QCAMERA_MAX_FILEPATH_LENGTH);
14008 }
14009}
14010
14011/*===========================================================================
14012* FUNCTION : getEepromVersionInfo
14013*
14014* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14015*
14016* PARAMETERS : None
14017*
14018* RETURN : string describing EEPROM version
14019* "\0" if no such info available
14020*==========================================================================*/
14021const char *QCamera3HardwareInterface::getEepromVersionInfo()
14022{
14023 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14024}
14025
14026/*===========================================================================
14027* FUNCTION : getLdafCalib
14028*
14029* DESCRIPTION: Retrieve Laser AF calibration data
14030*
14031* PARAMETERS : None
14032*
14033* RETURN : Two uint32_t describing laser AF calibration data
14034* NULL if none is available.
14035*==========================================================================*/
14036const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14037{
14038 if (mLdafCalibExist) {
14039 return &mLdafCalib[0];
14040 } else {
14041 return NULL;
14042 }
14043}
14044
14045/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014046* FUNCTION : getEaselFwVersion
14047*
14048* DESCRIPTION: Retrieve Easel firmware version
14049*
14050* PARAMETERS : None
14051*
14052* RETURN : string describing Firmware version
14053* "\0" if Easel manager client is not open
14054*==========================================================================*/
14055const char *QCamera3HardwareInterface::getEaselFwVersion()
14056{
14057 int rc = NO_ERROR;
14058
14059 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
14060 ALOGD("%s: Querying Easel firmware version", __FUNCTION__);
14061 if (EaselManagerClientOpened) {
14062 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
14063 if (rc != OK)
14064 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
14065 else
14066 return (const char *)&mEaselFwVersion[0];
14067 }
14068 return NULL;
14069}
14070
14071/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014072 * FUNCTION : dynamicUpdateMetaStreamInfo
14073 *
14074 * DESCRIPTION: This function:
14075 * (1) stops all the channels
14076 * (2) returns error on pending requests and buffers
14077 * (3) sends metastream_info in setparams
14078 * (4) starts all channels
14079 * This is useful when sensor has to be restarted to apply any
14080 * settings such as frame rate from a different sensor mode
14081 *
14082 * PARAMETERS : None
14083 *
14084 * RETURN : NO_ERROR on success
14085 * Error codes on failure
14086 *
14087 *==========================================================================*/
14088int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14089{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014090 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014091 int rc = NO_ERROR;
14092
14093 LOGD("E");
14094
14095 rc = stopAllChannels();
14096 if (rc < 0) {
14097 LOGE("stopAllChannels failed");
14098 return rc;
14099 }
14100
14101 rc = notifyErrorForPendingRequests();
14102 if (rc < 0) {
14103 LOGE("notifyErrorForPendingRequests failed");
14104 return rc;
14105 }
14106
14107 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14108 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14109 "Format:%d",
14110 mStreamConfigInfo.type[i],
14111 mStreamConfigInfo.stream_sizes[i].width,
14112 mStreamConfigInfo.stream_sizes[i].height,
14113 mStreamConfigInfo.postprocess_mask[i],
14114 mStreamConfigInfo.format[i]);
14115 }
14116
14117 /* Send meta stream info once again so that ISP can start */
14118 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14119 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14120 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14121 mParameters);
14122 if (rc < 0) {
14123 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14124 }
14125
14126 rc = startAllChannels();
14127 if (rc < 0) {
14128 LOGE("startAllChannels failed");
14129 return rc;
14130 }
14131
14132 LOGD("X");
14133 return rc;
14134}
14135
14136/*===========================================================================
14137 * FUNCTION : stopAllChannels
14138 *
14139 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14140 *
14141 * PARAMETERS : None
14142 *
14143 * RETURN : NO_ERROR on success
14144 * Error codes on failure
14145 *
14146 *==========================================================================*/
14147int32_t QCamera3HardwareInterface::stopAllChannels()
14148{
14149 int32_t rc = NO_ERROR;
14150
14151 LOGD("Stopping all channels");
14152 // Stop the Streams/Channels
14153 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14154 it != mStreamInfo.end(); it++) {
14155 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14156 if (channel) {
14157 channel->stop();
14158 }
14159 (*it)->status = INVALID;
14160 }
14161
14162 if (mSupportChannel) {
14163 mSupportChannel->stop();
14164 }
14165 if (mAnalysisChannel) {
14166 mAnalysisChannel->stop();
14167 }
14168 if (mRawDumpChannel) {
14169 mRawDumpChannel->stop();
14170 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014171 if (mHdrPlusRawSrcChannel) {
14172 mHdrPlusRawSrcChannel->stop();
14173 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014174 if (mMetadataChannel) {
14175 /* If content of mStreamInfo is not 0, there is metadata stream */
14176 mMetadataChannel->stop();
14177 }
14178
14179 LOGD("All channels stopped");
14180 return rc;
14181}
14182
14183/*===========================================================================
14184 * FUNCTION : startAllChannels
14185 *
14186 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14187 *
14188 * PARAMETERS : None
14189 *
14190 * RETURN : NO_ERROR on success
14191 * Error codes on failure
14192 *
14193 *==========================================================================*/
14194int32_t QCamera3HardwareInterface::startAllChannels()
14195{
14196 int32_t rc = NO_ERROR;
14197
14198 LOGD("Start all channels ");
14199 // Start the Streams/Channels
14200 if (mMetadataChannel) {
14201 /* If content of mStreamInfo is not 0, there is metadata stream */
14202 rc = mMetadataChannel->start();
14203 if (rc < 0) {
14204 LOGE("META channel start failed");
14205 return rc;
14206 }
14207 }
14208 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14209 it != mStreamInfo.end(); it++) {
14210 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14211 if (channel) {
14212 rc = channel->start();
14213 if (rc < 0) {
14214 LOGE("channel start failed");
14215 return rc;
14216 }
14217 }
14218 }
14219 if (mAnalysisChannel) {
14220 mAnalysisChannel->start();
14221 }
14222 if (mSupportChannel) {
14223 rc = mSupportChannel->start();
14224 if (rc < 0) {
14225 LOGE("Support channel start failed");
14226 return rc;
14227 }
14228 }
14229 if (mRawDumpChannel) {
14230 rc = mRawDumpChannel->start();
14231 if (rc < 0) {
14232 LOGE("RAW dump channel start failed");
14233 return rc;
14234 }
14235 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014236 if (mHdrPlusRawSrcChannel) {
14237 rc = mHdrPlusRawSrcChannel->start();
14238 if (rc < 0) {
14239 LOGE("HDR+ RAW channel start failed");
14240 return rc;
14241 }
14242 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014243
14244 LOGD("All channels started");
14245 return rc;
14246}
14247
14248/*===========================================================================
14249 * FUNCTION : notifyErrorForPendingRequests
14250 *
14251 * DESCRIPTION: This function sends error for all the pending requests/buffers
14252 *
14253 * PARAMETERS : None
14254 *
14255 * RETURN : Error codes
14256 * NO_ERROR on success
14257 *
14258 *==========================================================================*/
14259int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14260{
Emilian Peev7650c122017-01-19 08:24:33 -080014261 notifyErrorFoPendingDepthData(mDepthChannel);
14262
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014263 auto pendingRequest = mPendingRequestsList.begin();
14264 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014265
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014266 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14267 // buffers (for which buffers aren't sent yet).
14268 while (pendingRequest != mPendingRequestsList.end() ||
14269 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14270 if (pendingRequest == mPendingRequestsList.end() ||
14271 pendingBuffer->frame_number < pendingRequest->frame_number) {
14272            // If metadata for this frame was already sent, notify about a buffer error and return
14273            // the buffers with an error status.
14274 for (auto &info : pendingBuffer->mPendingBufferList) {
14275 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014276 camera3_notify_msg_t notify_msg;
14277 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14278 notify_msg.type = CAMERA3_MSG_ERROR;
14279 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014280 notify_msg.message.error.error_stream = info.stream;
14281 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014282 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014283
14284 camera3_stream_buffer_t buffer = {};
14285 buffer.acquire_fence = -1;
14286 buffer.release_fence = -1;
14287 buffer.buffer = info.buffer;
14288 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14289 buffer.stream = info.stream;
14290 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014291 }
14292
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014293 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14294 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14295 pendingBuffer->frame_number > pendingRequest->frame_number) {
14296 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014297 camera3_notify_msg_t notify_msg;
14298 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14299 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014300 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14301 notify_msg.message.error.error_stream = nullptr;
14302 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014303 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014304
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014305 if (pendingRequest->input_buffer != nullptr) {
14306 camera3_capture_result result = {};
14307 result.frame_number = pendingRequest->frame_number;
14308 result.result = nullptr;
14309 result.input_buffer = pendingRequest->input_buffer;
14310 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014311 }
14312
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014313 mShutterDispatcher.clear(pendingRequest->frame_number);
14314 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14315 } else {
14316            // If neither the buffers nor the result metadata were sent yet, notify about a request error
14317            // and return the buffers with an error status.
14318 for (auto &info : pendingBuffer->mPendingBufferList) {
14319 camera3_notify_msg_t notify_msg;
14320 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14321 notify_msg.type = CAMERA3_MSG_ERROR;
14322 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14323 notify_msg.message.error.error_stream = info.stream;
14324 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14325 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014326
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014327 camera3_stream_buffer_t buffer = {};
14328 buffer.acquire_fence = -1;
14329 buffer.release_fence = -1;
14330 buffer.buffer = info.buffer;
14331 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14332 buffer.stream = info.stream;
14333 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14334 }
14335
14336 if (pendingRequest->input_buffer != nullptr) {
14337 camera3_capture_result result = {};
14338 result.frame_number = pendingRequest->frame_number;
14339 result.result = nullptr;
14340 result.input_buffer = pendingRequest->input_buffer;
14341 orchestrateResult(&result);
14342 }
14343
14344 mShutterDispatcher.clear(pendingRequest->frame_number);
14345 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14346 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014347 }
14348 }
14349
14350 /* Reset pending frame Drop list and requests list */
14351 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014352 mShutterDispatcher.clear();
14353 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014354 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014355 mExpectedFrameDuration = 0;
14356 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014357 LOGH("Cleared all the pending buffers ");
14358
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014359 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014360}
14361
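/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Check whether a stream of the given dimensions exceeds either
 *              the maximum viewfinder size or the 4K video dimensions and
 *              therefore has to be placed on the encoder path.
 *
 * PARAMETERS : @max_viewfinder_size: maximum viewfinder dimensions
 *              @width: stream width
 *              @height: stream height
 *
 * RETURN     : True if the stream is on the encoder path, False otherwise
 *==========================================================================*/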
14362bool QCamera3HardwareInterface::isOnEncoder(
14363 const cam_dimension_t max_viewfinder_size,
14364 uint32_t width, uint32_t height)
14365{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014366 return ((width > (uint32_t)max_viewfinder_size.width) ||
14367 (height > (uint32_t)max_viewfinder_size.height) ||
14368 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14369 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014370}
14371
14372/*===========================================================================
14373 * FUNCTION : setBundleInfo
14374 *
14375 * DESCRIPTION: Set bundle info for all streams that are bundled.
14376 *
14377 * PARAMETERS : None
14378 *
14379 * RETURN : NO_ERROR on success
14380 * Error codes on failure
14381 *==========================================================================*/
14382int32_t QCamera3HardwareInterface::setBundleInfo()
14383{
14384 int32_t rc = NO_ERROR;
14385
14386 if (mChannelHandle) {
14387 cam_bundle_config_t bundleInfo;
14388 memset(&bundleInfo, 0, sizeof(bundleInfo));
14389 rc = mCameraHandle->ops->get_bundle_info(
14390 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14391 if (rc != NO_ERROR) {
14392 LOGE("get_bundle_info failed");
14393 return rc;
14394 }
14395 if (mAnalysisChannel) {
14396 mAnalysisChannel->setBundleInfo(bundleInfo);
14397 }
14398 if (mSupportChannel) {
14399 mSupportChannel->setBundleInfo(bundleInfo);
14400 }
14401 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14402 it != mStreamInfo.end(); it++) {
14403 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14404 channel->setBundleInfo(bundleInfo);
14405 }
14406 if (mRawDumpChannel) {
14407 mRawDumpChannel->setBundleInfo(bundleInfo);
14408 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014409 if (mHdrPlusRawSrcChannel) {
14410 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14411 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014412 }
14413
14414 return rc;
14415}
14416
14417/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014418 * FUNCTION : setInstantAEC
14419 *
14420 * DESCRIPTION: Set Instant AEC related params.
14421 *
14422 * PARAMETERS :
14423 * @meta: CameraMetadata reference
14424 *
14425 * RETURN : NO_ERROR on success
14426 * Error codes on failure
14427 *==========================================================================*/
14428int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14429{
14430 int32_t rc = NO_ERROR;
14431 uint8_t val = 0;
14432 char prop[PROPERTY_VALUE_MAX];
14433
14434 // First try to configure instant AEC from framework metadata
14435 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14436 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14437 }
14438
14439 // If framework did not set this value, try to read from set prop.
14440    // If the framework did not set this value, try to read it from the system property.
14441 memset(prop, 0, sizeof(prop));
14442 property_get("persist.camera.instant.aec", prop, "0");
14443 val = (uint8_t)atoi(prop);
14444 }
14445
14446 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14447 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14448 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14449 mInstantAEC = val;
14450 mInstantAECSettledFrameNumber = 0;
14451 mInstantAecFrameIdxCount = 0;
14452 LOGH("instantAEC value set %d",val);
14453 if (mInstantAEC) {
14454 memset(prop, 0, sizeof(prop));
14455 property_get("persist.camera.ae.instant.bound", prop, "10");
14456 int32_t aec_frame_skip_cnt = atoi(prop);
14457 if (aec_frame_skip_cnt >= 0) {
14458 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14459 } else {
14460 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14461 rc = BAD_VALUE;
14462 }
14463 }
14464 } else {
14465 LOGE("Bad instant aec value set %d", val);
14466 rc = BAD_VALUE;
14467 }
14468 return rc;
14469}
14470
14471/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014472 * FUNCTION : get_num_overall_buffers
14473 *
14474 * DESCRIPTION: Return the total number of pending buffers across all requests.
14475 *
14476 * PARAMETERS : None
14477 *
14478 * RETURN : Number of overall pending buffers
14479 *
14480 *==========================================================================*/
14481uint32_t PendingBuffersMap::get_num_overall_buffers()
14482{
14483 uint32_t sum_buffers = 0;
14484 for (auto &req : mPendingBuffersInRequest) {
14485 sum_buffers += req.mPendingBufferList.size();
14486 }
14487 return sum_buffers;
14488}
14489
14490/*===========================================================================
14491 * FUNCTION : removeBuf
14492 *
14493 * DESCRIPTION: Remove a matching buffer from tracker.
14494 *
14495 * PARAMETERS : @buffer: image buffer for the callback
14496 *
14497 * RETURN : None
14498 *
14499 *==========================================================================*/
14500void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14501{
14502 bool buffer_found = false;
14503 for (auto req = mPendingBuffersInRequest.begin();
14504 req != mPendingBuffersInRequest.end(); req++) {
14505 for (auto k = req->mPendingBufferList.begin();
14506 k != req->mPendingBufferList.end(); k++ ) {
14507 if (k->buffer == buffer) {
14508                LOGD("Frame %d: Found frame buffer %p, removing it from mPendingBufferList",
14509 req->frame_number, buffer);
14510 k = req->mPendingBufferList.erase(k);
14511 if (req->mPendingBufferList.empty()) {
14512 // Remove this request from Map
14513 req = mPendingBuffersInRequest.erase(req);
14514 }
14515 buffer_found = true;
14516 break;
14517 }
14518 }
14519 if (buffer_found) {
14520 break;
14521 }
14522 }
14523 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14524 get_num_overall_buffers());
14525}
14526
14527/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014528 * FUNCTION : getBufErrStatus
14529 *
14530 * DESCRIPTION: Get buffer error status
14531 *
14532 * PARAMETERS : @buffer: buffer handle
14533 *
14534 * RETURN : Error status
14535 *
14536 *==========================================================================*/
14537int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14538{
14539 for (auto& req : mPendingBuffersInRequest) {
14540 for (auto& k : req.mPendingBufferList) {
14541 if (k.buffer == buffer)
14542 return k.bufStatus;
14543 }
14544 }
14545 return CAMERA3_BUFFER_STATUS_OK;
14546}
14547
14548/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014549 * FUNCTION : setPAAFSupport
14550 *
14551 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14552 * feature mask according to stream type and filter
14553 * arrangement
14554 *
14555 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14556 * @stream_type: stream type
14557 * @filter_arrangement: filter arrangement
14558 *
14559 * RETURN : None
14560 *==========================================================================*/
14561void QCamera3HardwareInterface::setPAAFSupport(
14562 cam_feature_mask_t& feature_mask,
14563 cam_stream_type_t stream_type,
14564 cam_color_filter_arrangement_t filter_arrangement)
14565{
Thierry Strudel3d639192016-09-09 11:52:26 -070014566 switch (filter_arrangement) {
14567 case CAM_FILTER_ARRANGEMENT_RGGB:
14568 case CAM_FILTER_ARRANGEMENT_GRBG:
14569 case CAM_FILTER_ARRANGEMENT_GBRG:
14570 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014571 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14572 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014573 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014574 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14575 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014576 }
14577 break;
14578 case CAM_FILTER_ARRANGEMENT_Y:
14579 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14580 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14581 }
14582 break;
14583 default:
14584 break;
14585 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014586 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14587 feature_mask, stream_type, filter_arrangement);
14588
14589
Thierry Strudel3d639192016-09-09 11:52:26 -070014590}
14591
14592/*===========================================================================
14593* FUNCTION : getSensorMountAngle
14594*
14595* DESCRIPTION: Retrieve sensor mount angle
14596*
14597* PARAMETERS : None
14598*
14599* RETURN : sensor mount angle in uint32_t
14600*==========================================================================*/
14601uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14602{
14603 return gCamCapability[mCameraId]->sensor_mount_angle;
14604}
14605
14606/*===========================================================================
14607* FUNCTION : getRelatedCalibrationData
14608*
14609* DESCRIPTION: Retrieve related system calibration data
14610*
14611* PARAMETERS : None
14612*
14613* RETURN     : Pointer to related system calibration data
14614*==========================================================================*/
14615const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14616{
14617 return (const cam_related_system_calibration_data_t *)
14618 &(gCamCapability[mCameraId]->related_cam_calibration);
14619}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014620
14621/*===========================================================================
14622 * FUNCTION : is60HzZone
14623 *
14624 * DESCRIPTION: Whether the phone is in a region with 60Hz mains electricity frequency
14625 *
14626 * PARAMETERS : None
14627 *
14628 * RETURN : True if in 60Hz zone, False otherwise
14629 *==========================================================================*/
14630bool QCamera3HardwareInterface::is60HzZone()
14631{
14632 time_t t = time(NULL);
14633 struct tm lt;
14634
14635 struct tm* r = localtime_r(&t, &lt);
14636
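    // Heuristic based only on the UTC offset: offsets strictly between -2h and
    // +8h are treated as 50Hz zones; everything else (including a failed
    // localtime_r call) defaults to a 60Hz zone.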
14637 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14638 return true;
14639 else
14640 return false;
14641}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014642
14643/*===========================================================================
14644 * FUNCTION : adjustBlackLevelForCFA
14645 *
14646 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14647 * of bayer CFA (Color Filter Array).
14648 *              of the Bayer CFA (Color Filter Array).
14649 * PARAMETERS : @input: black level pattern in the order of RGGB
14650 * @output: black level pattern in the order of CFA
14651 * @color_arrangement: CFA color arrangement
14652 *
14653 * RETURN : None
14654 *==========================================================================*/
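// Example (per the mapping below): with a GRBG sensor, the RGGB-ordered
// input {R, Gr, Gb, B} is reordered to {Gr, R, B, Gb}.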
14655template<typename T>
14656void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14657 T input[BLACK_LEVEL_PATTERN_CNT],
14658 T output[BLACK_LEVEL_PATTERN_CNT],
14659 cam_color_filter_arrangement_t color_arrangement)
14660{
14661 switch (color_arrangement) {
14662 case CAM_FILTER_ARRANGEMENT_GRBG:
14663 output[0] = input[1];
14664 output[1] = input[0];
14665 output[2] = input[3];
14666 output[3] = input[2];
14667 break;
14668 case CAM_FILTER_ARRANGEMENT_GBRG:
14669 output[0] = input[2];
14670 output[1] = input[3];
14671 output[2] = input[0];
14672 output[3] = input[1];
14673 break;
14674 case CAM_FILTER_ARRANGEMENT_BGGR:
14675 output[0] = input[3];
14676 output[1] = input[2];
14677 output[2] = input[1];
14678 output[3] = input[0];
14679 break;
14680 case CAM_FILTER_ARRANGEMENT_RGGB:
14681 output[0] = input[0];
14682 output[1] = input[1];
14683 output[2] = input[2];
14684 output[3] = input[3];
14685 break;
14686 default:
14687 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14688 break;
14689 }
14690}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014691
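// Copy the JPEG-related settings (GPS coordinates/processing method/timestamp,
// JPEG orientation and quality, thumbnail quality and size) and the capture
// intent from the HAL settings buffer of the HDR+ request into the result
// metadata returned to the framework.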
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014692void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14693 CameraMetadata &resultMetadata,
14694 std::shared_ptr<metadata_buffer_t> settings)
14695{
14696 if (settings == nullptr) {
14697 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14698 return;
14699 }
14700
14701 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14702 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14703 }
14704
14705 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14706 String8 str((const char *)gps_methods);
14707 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14708 }
14709
14710 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14711 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14712 }
14713
14714 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14715 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14716 }
14717
14718 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14719 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14720 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14721 }
14722
14723 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14724 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14725 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14726 }
14727
14728 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14729 int32_t fwk_thumb_size[2];
14730 fwk_thumb_size[0] = thumb_size->width;
14731 fwk_thumb_size[1] = thumb_size->height;
14732 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14733 }
14734
14735 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14736 uint8_t fwk_intent = intent[0];
14737 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14738 }
14739}
14740
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014741bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14742 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014743 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14744 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14745 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14746 return false;
14747 }
14748
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014749 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14750 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14751 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014752 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014753 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014754 return false;
14755 }
14756
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014757 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014758 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14759 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014760 return false;
14761 }
14762
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014763 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14764 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14765 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14766 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14767 return false;
14768 }
14769
14770 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14771 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14772 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14773 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14774 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14775 return false;
14776 }
14777
14778 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14779 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14780 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14781 return false;
14782 }
14783
14784 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14785 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14786 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14787        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14788 return false;
14789 }
14790
14791 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14792 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14793 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14794 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14795 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14796 return false;
14797 }
14798
14799 // TODO (b/32585046): support non-ZSL.
14800 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14801 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14802 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14803 return false;
14804 }
14805
14806 // TODO (b/32586081): support flash.
14807 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14808 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14809 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14810 return false;
14811 }
14812
14813 // TODO (b/36492953): support digital zoom.
14814 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14815 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14816 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14817 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14818 gCamCapability[mCameraId]->active_array_size.width ||
14819 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14820 gCamCapability[mCameraId]->active_array_size.height) {
14821 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14822 __FUNCTION__);
14823 return false;
14824 }
14825
14826 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14827 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14828 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14829 return false;
14830 }
14831
14832 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014833 if (request.num_output_buffers != 1 ||
14834 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014835 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014836 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014837 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014838                    request.output_buffers[i].stream->width,
14839                    request.output_buffers[i].stream->height,
14840                    request.output_buffers[i].stream->format);
14841 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014842 return false;
14843 }
14844
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014845 return true;
14846}
14847
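// Try to submit an HDR+ capture request: verify the request is HDR+
// compatible, obtain a YUV buffer from the pic channel, wrap it in a pbcamera
// capture request and hand it to the HDR+ service. On success, the YUV buffer
// and the framework output buffer are tracked in hdrPlusRequest.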
14848bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14849 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14850 const CameraMetadata &metadata)
14851{
14852 if (hdrPlusRequest == nullptr) return false;
14853 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14854
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014855 // Get a YUV buffer from pic channel.
14856 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14857 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14858 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14859 if (res != OK) {
14860 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14861 __FUNCTION__, strerror(-res), res);
14862 return false;
14863 }
14864
14865 pbcamera::StreamBuffer buffer;
14866 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014867 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014868 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014869 buffer.dataSize = yuvBuffer->frame_len;
14870
14871 pbcamera::CaptureRequest pbRequest;
14872 pbRequest.id = request.frame_number;
14873 pbRequest.outputBuffers.push_back(buffer);
14874
14875 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014876 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014877 if (res != OK) {
14878 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14879 strerror(-res), res);
14880 return false;
14881 }
14882
14883 hdrPlusRequest->yuvBuffer = yuvBuffer;
14884 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14885
14886 return true;
14887}
14888
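// Ask the Easel manager client to open an HDR+ client asynchronously, unless
// one is already open or being opened. onOpened()/onOpenFailed() is invoked
// when the open attempt completes.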
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014889status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14890{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014891 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14892 return OK;
14893 }
14894
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014895 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014896 if (res != OK) {
14897 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14898 strerror(-res), res);
14899 return res;
14900 }
14901 gHdrPlusClientOpening = true;
14902
14903 return OK;
14904}
14905
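// Enable HDR+ mode: if the HDR+ client is not open yet, start the asynchronous
// open (HDR+ mode will be enabled once it completes) and return; otherwise
// configure the HDR+ streams and enable ZSL HDR+ mode so Easel starts
// capturing ZSL raw buffers.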
Chien-Yu Chenee335912017-02-09 17:53:20 -080014906status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14907{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014908 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014909
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014910 if (mHdrPlusModeEnabled) {
14911 return OK;
14912 }
14913
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014914 // Check if gHdrPlusClient is opened or being opened.
14915 if (gHdrPlusClient == nullptr) {
14916 if (gHdrPlusClientOpening) {
14917 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14918 return OK;
14919 }
14920
14921 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014922 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014923 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14924 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014925 return res;
14926 }
14927
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014928 // When opening HDR+ client completes, HDR+ mode will be enabled.
14929 return OK;
14930
Chien-Yu Chenee335912017-02-09 17:53:20 -080014931 }
14932
14933 // Configure stream for HDR+.
14934 res = configureHdrPlusStreamsLocked();
14935 if (res != OK) {
14936 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014937 return res;
14938 }
14939
14940 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14941 res = gHdrPlusClient->setZslHdrPlusMode(true);
14942 if (res != OK) {
14943 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014944 return res;
14945 }
14946
14947 mHdrPlusModeEnabled = true;
14948 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14949
14950 return OK;
14951}
14952
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014953void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14954{
14955 if (gHdrPlusClientOpening) {
14956 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14957 }
14958}
14959
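// Disable HDR+ mode and close the HDR+ client so Easel can enter low power
// mode. Safe to call when HDR+ mode is already disabled.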
Chien-Yu Chenee335912017-02-09 17:53:20 -080014960void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14961{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014962 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014963 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014964 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14965 if (res != OK) {
14966 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14967 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014968
14969 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014970 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014971 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014972 }
14973
14974 mHdrPlusModeEnabled = false;
14975 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14976}
14977
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014978bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14979{
14980 // Check if mPictureChannel is valid.
14981 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14982 if (mPictureChannel == nullptr) {
14983 return false;
14984 }
14985
14986 return true;
14987}
14988
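// Configure the HDR+ client streams: the input is either the HAL-provided
// RAW10 stream (mHdrPlusRawSrcChannel) or the sensor MIPI output, and the
// output is currently the YUV stream backing the pic channel.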
Chien-Yu Chenee335912017-02-09 17:53:20 -080014989status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014990{
14991 pbcamera::InputConfiguration inputConfig;
14992 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14993 status_t res = OK;
14994
14995 // Configure HDR+ client streams.
14996 // Get input config.
14997 if (mHdrPlusRawSrcChannel) {
14998 // HDR+ input buffers will be provided by HAL.
14999 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
15000 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
15001 if (res != OK) {
15002            LOGE("%s: Failed to fill stream config for HDR+ RAW src stream: %s (%d)",
15003 __FUNCTION__, strerror(-res), res);
15004 return res;
15005 }
15006
15007 inputConfig.isSensorInput = false;
15008 } else {
15009 // Sensor MIPI will send data to Easel.
15010 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080015011 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015012 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15013 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15014 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15015 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15016 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070015017 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015018 if (mSensorModeInfo.num_raw_bits != 10) {
15019 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15020 mSensorModeInfo.num_raw_bits);
15021 return BAD_VALUE;
15022 }
15023
15024 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015025 }
15026
15027 // Get output configurations.
15028 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080015029 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015030
15031 // Easel may need to output YUV output buffers if mPictureChannel was created.
15032 pbcamera::StreamConfiguration yuvOutputConfig;
15033 if (mPictureChannel != nullptr) {
15034 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
15035 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
15036 if (res != OK) {
15037            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15038 __FUNCTION__, strerror(-res), res);
15039
15040 return res;
15041 }
15042
15043 outputStreamConfigs.push_back(yuvOutputConfig);
15044 }
15045
15046 // TODO: consider other channels for YUV output buffers.
15047
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015048 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015049 if (res != OK) {
15050        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15051 strerror(-res), res);
15052 return res;
15053 }
15054
15055 return OK;
15056}
15057
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015058void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15059{
15060 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15061 // Set HAL state to error.
15062 pthread_mutex_lock(&mMutex);
15063 mState = ERROR;
15064 pthread_mutex_unlock(&mMutex);
15065
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015066 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015067}
15068
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015069void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15070{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015071 if (client == nullptr) {
15072 ALOGE("%s: Opened client is null.", __FUNCTION__);
15073 return;
15074 }
15075
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015076 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015077 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15078
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015079 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015080 if (!gHdrPlusClientOpening) {
15081 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15082 return;
15083 }
15084
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015085 gHdrPlusClient = std::move(client);
15086 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015087 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015088
15089 // Set static metadata.
15090 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15091 if (res != OK) {
15092 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15093 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015094 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015095 gHdrPlusClient = nullptr;
15096 return;
15097 }
15098
15099 // Enable HDR+ mode.
15100 res = enableHdrPlusModeLocked();
15101 if (res != OK) {
15102        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
15103 }
15104}
15105
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015106void QCamera3HardwareInterface::onOpenFailed(status_t err)
15107{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015108 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015109 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015110 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015111 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015112}
15113
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015114void QCamera3HardwareInterface::onFatalError()
15115{
15116 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15117
15118 // Set HAL state to error.
15119 pthread_mutex_lock(&mMutex);
15120 mState = ERROR;
15121 pthread_mutex_unlock(&mMutex);
15122
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015123 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015124}
15125
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015126void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15127{
15128 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15129 __LINE__, requestId, apSensorTimestampNs);
15130
15131 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15132}
15133
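// HDR+ client callback: the service is ready to accept the next still capture
// request. Report this to the framework as a partial result carrying
// NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY.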
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015134void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15135{
15136 pthread_mutex_lock(&mMutex);
15137
15138 // Find the pending request for this result metadata.
15139 auto requestIter = mPendingRequestsList.begin();
15140 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15141 requestIter++;
15142 }
15143
15144 if (requestIter == mPendingRequestsList.end()) {
15145 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15146 pthread_mutex_unlock(&mMutex);
15147 return;
15148 }
15149
15150 requestIter->partial_result_cnt++;
15151
15152 CameraMetadata metadata;
15153 uint8_t ready = true;
15154 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15155
15156 // Send it to framework.
15157 camera3_capture_result_t result = {};
15158
15159 result.result = metadata.getAndLock();
15160 // Populate metadata result
15161 result.frame_number = requestId;
15162 result.num_output_buffers = 0;
15163 result.output_buffers = NULL;
15164 result.partial_result = requestIter->partial_result_cnt;
15165
15166 orchestrateResult(&result);
15167 metadata.unlock(result.result);
15168
15169 pthread_mutex_unlock(&mMutex);
15170}
15171
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015172void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15173 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15174 uint32_t stride, int32_t format)
15175{
15176 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15177 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15178 __LINE__, width, height, requestId);
15179 char buf[FILENAME_MAX] = {};
15180 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15181 requestId, width, height);
15182
15183 pbcamera::StreamConfiguration config = {};
15184 config.image.width = width;
15185 config.image.height = height;
15186 config.image.format = format;
15187
15188 pbcamera::PlaneConfiguration plane = {};
15189 plane.stride = stride;
15190 plane.scanline = height;
15191
15192 config.image.planes.push_back(plane);
15193
15194 pbcamera::StreamBuffer buffer = {};
15195 buffer.streamId = 0;
15196 buffer.dmaBufFd = -1;
15197 buffer.data = postview->data();
15198 buffer.dataSize = postview->size();
15199
15200 hdrplus_client_utils::writePpm(buf, config, buffer);
15201 }
15202
15203 pthread_mutex_lock(&mMutex);
15204
15205 // Find the pending request for this result metadata.
15206 auto requestIter = mPendingRequestsList.begin();
15207 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15208 requestIter++;
15209 }
15210
15211 if (requestIter == mPendingRequestsList.end()) {
15212 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15213 pthread_mutex_unlock(&mMutex);
15214 return;
15215 }
15216
15217 requestIter->partial_result_cnt++;
15218
15219 CameraMetadata metadata;
15220 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15221 static_cast<int32_t>(stride)};
15222 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15223 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15224
15225 // Send it to framework.
15226 camera3_capture_result_t result = {};
15227
15228 result.result = metadata.getAndLock();
15229 // Populate metadata result
15230 result.frame_number = requestId;
15231 result.num_output_buffers = 0;
15232 result.output_buffers = NULL;
15233 result.partial_result = requestIter->partial_result_cnt;
15234
15235 orchestrateResult(&result);
15236 metadata.unlock(result.result);
15237
15238 pthread_mutex_unlock(&mMutex);
15239}
15240
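// HDR+ client callback for a finished capture: validate the single YUV output
// buffer, signal next-capture-ready, merge the original request settings into
// the result metadata, optionally dump the YUV output, hand the YUV buffer
// back to the pic channel for JPEG encoding, send the result metadata to the
// framework, and drop the pending HDR+ request.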
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015241void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015242 const camera_metadata_t &resultMetadata)
15243{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015244 if (result != nullptr) {
15245 if (result->outputBuffers.size() != 1) {
15246 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15247 result->outputBuffers.size());
15248 return;
15249 }
15250
15251 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15252 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15253 result->outputBuffers[0].streamId);
15254 return;
15255 }
15256
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015257 // TODO (b/34854987): initiate this from HDR+ service.
15258 onNextCaptureReady(result->requestId);
15259
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015260 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015261 HdrPlusPendingRequest pendingRequest;
15262 {
15263 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15264 auto req = mHdrPlusPendingRequests.find(result->requestId);
15265 pendingRequest = req->second;
15266 }
15267
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015268 // Update the result metadata with the settings of the HDR+ still capture request because
15269 // the result metadata belongs to a ZSL buffer.
15270 CameraMetadata metadata;
15271 metadata = &resultMetadata;
15272 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15273 camera_metadata_t* updatedResultMetadata = metadata.release();
15274
15275 QCamera3PicChannel *picChannel =
15276 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15277
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015278 // Check if dumping HDR+ YUV output is enabled.
15279 char prop[PROPERTY_VALUE_MAX];
15280 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15281 bool dumpYuvOutput = atoi(prop);
15282
15283 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015284 // Dump yuv buffer to a ppm file.
15285 pbcamera::StreamConfiguration outputConfig;
15286 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15287 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15288 if (rc == OK) {
15289 char buf[FILENAME_MAX] = {};
15290 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15291 result->requestId, result->outputBuffers[0].streamId,
15292 outputConfig.image.width, outputConfig.image.height);
15293
15294 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15295 } else {
15296 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15297 __FUNCTION__, strerror(-rc), rc);
15298 }
15299 }
15300
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015301 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15302 auto halMetadata = std::make_shared<metadata_buffer_t>();
15303 clear_metadata_buffer(halMetadata.get());
15304
15305 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15306 // encoding.
15307 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15308 halStreamId, /*minFrameDuration*/0);
15309 if (res == OK) {
15310 // Return the buffer to pic channel for encoding.
15311 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15312 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15313 halMetadata);
15314 } else {
15315 // Return the buffer without encoding.
15316 // TODO: This should not happen but we may want to report an error buffer to camera
15317 // service.
15318 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15319 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15320 strerror(-res), res);
15321 }
15322
15323 // Send HDR+ metadata to framework.
15324 {
15325 pthread_mutex_lock(&mMutex);
15326
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015327 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15328 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015329 pthread_mutex_unlock(&mMutex);
15330 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015331
15332 // Remove the HDR+ pending request.
15333 {
15334 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15335 auto req = mHdrPlusPendingRequests.find(result->requestId);
15336 mHdrPlusPendingRequests.erase(req);
15337 }
15338 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015339}
15340
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015341void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15342{
15343 if (failedResult == nullptr) {
15344 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15345 return;
15346 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015347
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015348 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015349
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015350 // Remove the pending HDR+ request.
15351 {
15352 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15353 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15354
15355 // Return the buffer to pic channel.
15356 QCamera3PicChannel *picChannel =
15357 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15358 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15359
15360 mHdrPlusPendingRequests.erase(pendingRequest);
15361 }
15362
15363 pthread_mutex_lock(&mMutex);
15364
15365 // Find the pending buffers.
15366 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15367 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15368 if (pendingBuffers->frame_number == failedResult->requestId) {
15369 break;
15370 }
15371 pendingBuffers++;
15372 }
15373
15374 // Send out buffer errors for the pending buffers.
15375 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15376 std::vector<camera3_stream_buffer_t> streamBuffers;
15377 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15378 // Prepare a stream buffer.
15379 camera3_stream_buffer_t streamBuffer = {};
15380 streamBuffer.stream = buffer.stream;
15381 streamBuffer.buffer = buffer.buffer;
15382 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15383 streamBuffer.acquire_fence = -1;
15384 streamBuffer.release_fence = -1;
15385
15386 streamBuffers.push_back(streamBuffer);
15387
15388 // Send out error buffer event.
15389 camera3_notify_msg_t notify_msg = {};
15390 notify_msg.type = CAMERA3_MSG_ERROR;
15391 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15392 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15393 notify_msg.message.error.error_stream = buffer.stream;
15394
15395 orchestrateNotify(&notify_msg);
15396 }
15397
15398 camera3_capture_result_t result = {};
15399 result.frame_number = pendingBuffers->frame_number;
15400 result.num_output_buffers = streamBuffers.size();
15401 result.output_buffers = &streamBuffers[0];
15402
15403 // Send out result with buffer errors.
15404 orchestrateResult(&result);
15405
15406 // Remove pending buffers.
15407 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15408 }
15409
15410 // Remove pending request.
15411 auto halRequest = mPendingRequestsList.begin();
15412 while (halRequest != mPendingRequestsList.end()) {
15413 if (halRequest->frame_number == failedResult->requestId) {
15414 mPendingRequestsList.erase(halRequest);
15415 break;
15416 }
15417 halRequest++;
15418 }
15419
15420 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015421}
15422
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015423
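// ShutterDispatcher guarantees that shutter notifications reach the framework
// in frame-number order: expectShutter() registers a pending shutter when a
// request is queued, and markShutterReady() records the sensor timestamp and
// flushes every consecutive ready shutter from the front of the map. Regular
// and reprocess requests are tracked in separate maps.
//
// A minimal usage sketch (illustrative only; frameNumber and sensorTimestampNs
// are placeholder names, not the HAL's actual call sites):
//
//     mShutterDispatcher.expectShutter(frameNumber, /*isReprocess*/false);
//     // ... later, when the sensor timestamp for frameNumber arrives:
//     mShutterDispatcher.markShutterReady(frameNumber, sensorTimestampNs);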
15424ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15425 mParent(parent) {}
15426
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015427void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015428{
15429 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015430
15431 if (isReprocess) {
15432 mReprocessShutters.emplace(frameNumber, Shutter());
15433 } else {
15434 mShutters.emplace(frameNumber, Shutter());
15435 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015436}
15437
15438void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15439{
15440 std::lock_guard<std::mutex> lock(mLock);
15441
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015442 std::map<uint32_t, Shutter> *shutters = nullptr;
15443
15444 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015445 auto shutter = mShutters.find(frameNumber);
15446 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015447 shutter = mReprocessShutters.find(frameNumber);
15448 if (shutter == mReprocessShutters.end()) {
15449 // Shutter was already sent.
15450 return;
15451 }
15452 shutters = &mReprocessShutters;
15453 } else {
15454 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015455 }
15456
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015457 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015458 shutter->second.ready = true;
15459 shutter->second.timestamp = timestamp;
15460
15461    // Iterate through the shutters, sending out ready ones until the first shutter that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015462 shutter = shutters->begin();
15463 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015464 if (!shutter->second.ready) {
15465 // If this shutter is not ready, the following shutters can't be sent.
15466 break;
15467 }
15468
15469 camera3_notify_msg_t msg = {};
15470 msg.type = CAMERA3_MSG_SHUTTER;
15471 msg.message.shutter.frame_number = shutter->first;
15472 msg.message.shutter.timestamp = shutter->second.timestamp;
15473 mParent->orchestrateNotify(&msg);
15474
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015475 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015476 }
15477}
15478
15479void ShutterDispatcher::clear(uint32_t frameNumber)
15480{
15481 std::lock_guard<std::mutex> lock(mLock);
15482 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015483 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015484}
15485
15486void ShutterDispatcher::clear()
15487{
15488 std::lock_guard<std::mutex> lock(mLock);
15489
15490 // Log errors for stale shutters.
15491 for (auto &shutter : mShutters) {
15492 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15493 __FUNCTION__, shutter.first, shutter.second.ready,
15494 shutter.second.timestamp);
15495 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015496
15497 // Log errors for stale reprocess shutters.
15498 for (auto &shutter : mReprocessShutters) {
15499 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15500 __FUNCTION__, shutter.first, shutter.second.ready,
15501 shutter.second.timestamp);
15502 }
15503
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015504 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015505 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015506}
15507
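// OutputBufferDispatcher plays the same role for output buffers that
// ShutterDispatcher plays for shutters: configureStreams() creates a per-stream
// map, expectBuffer() registers a pending buffer for a frame number, and
// markBufferReady() returns buffers to the framework in frame-number order
// within each stream.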
15508OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15509 mParent(parent) {}
15510
15511status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15512{
15513 std::lock_guard<std::mutex> lock(mLock);
15514 mStreamBuffers.clear();
15515 if (!streamList) {
15516 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15517 return -EINVAL;
15518 }
15519
15520 // Create a "frame-number -> buffer" map for each stream.
15521 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15522 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15523 }
15524
15525 return OK;
15526}
15527
15528status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15529{
15530 std::lock_guard<std::mutex> lock(mLock);
15531
15532 // Find the "frame-number -> buffer" map for the stream.
15533 auto buffers = mStreamBuffers.find(stream);
15534 if (buffers == mStreamBuffers.end()) {
15535 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15536 return -EINVAL;
15537 }
15538
15539 // Create an unready buffer for this frame number.
15540 buffers->second.emplace(frameNumber, Buffer());
15541 return OK;
15542}
15543
15544void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15545 const camera3_stream_buffer_t &buffer)
15546{
15547 std::lock_guard<std::mutex> lock(mLock);
15548
15549 // Find the frame number -> buffer map for the stream.
15550 auto buffers = mStreamBuffers.find(buffer.stream);
15551 if (buffers == mStreamBuffers.end()) {
15552 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15553 return;
15554 }
15555
15556    // Find the unready buffer for this frame number and mark it ready.
15557 auto pendingBuffer = buffers->second.find(frameNumber);
15558 if (pendingBuffer == buffers->second.end()) {
15559 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15560 return;
15561 }
15562
15563 pendingBuffer->second.ready = true;
15564 pendingBuffer->second.buffer = buffer;
15565
15566    // Iterate through the buffers, sending out ready ones until the first buffer that is not ready yet.
15567 pendingBuffer = buffers->second.begin();
15568 while (pendingBuffer != buffers->second.end()) {
15569 if (!pendingBuffer->second.ready) {
15570 // If this buffer is not ready, the following buffers can't be sent.
15571 break;
15572 }
15573
15574 camera3_capture_result_t result = {};
15575 result.frame_number = pendingBuffer->first;
15576 result.num_output_buffers = 1;
15577 result.output_buffers = &pendingBuffer->second.buffer;
15578
15579        // Send out the result with this buffer.
15580 mParent->orchestrateResult(&result);
15581
15582 pendingBuffer = buffers->second.erase(pendingBuffer);
15583 }
15584}
15585
15586void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15587{
15588 std::lock_guard<std::mutex> lock(mLock);
15589
15590 // Log errors for stale buffers.
15591 for (auto &buffers : mStreamBuffers) {
15592 for (auto &buffer : buffers.second) {
15593 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15594 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15595 }
15596 buffers.second.clear();
15597 }
15598
15599 if (clearConfiguredStreams) {
15600 mStreamBuffers.clear();
15601 }
15602}
15603
Thierry Strudel3d639192016-09-09 11:52:26 -070015604}; //end namespace qcamera