blob: 68173949f484a1459847e911982f39ebbd95be54 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
Jiyong Parkd4caeb72017-06-12 17:16:36 +090067using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070068using namespace android;
69
70namespace qcamera {
71
72#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
73
74#define EMPTY_PIPELINE_DELAY 2
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070075// mm_camera has 2 partial results: 3A, and final result.
76// HDR+ requests have 3 partial results: postview, next request ready, and final result.
77#define PARTIAL_RESULT_COUNT 3
Thierry Strudel3d639192016-09-09 11:52:26 -070078#define FRAME_SKIP_DELAY 0
79
80#define MAX_VALUE_8BIT ((1<<8)-1)
81#define MAX_VALUE_10BIT ((1<<10)-1)
82#define MAX_VALUE_12BIT ((1<<12)-1)
83
84#define VIDEO_4K_WIDTH 3840
85#define VIDEO_4K_HEIGHT 2160
86
Jason Leeb9e76432017-03-10 17:14:19 -080087#define MAX_EIS_WIDTH 3840
88#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070089
90#define MAX_RAW_STREAMS 1
91#define MAX_STALLING_STREAMS 1
92#define MAX_PROCESSED_STREAMS 3
93/* Batch mode is enabled only if FPS set is equal to or greater than this */
94#define MIN_FPS_FOR_BATCH_MODE (120)
95#define PREVIEW_FPS_FOR_HFR (30)
96#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080097#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070098#define MAX_HFR_BATCH_SIZE (8)
99#define REGIONS_TUPLE_COUNT 5
Thierry Strudel3d639192016-09-09 11:52:26 -0700100// Set a threshold for detection of missing buffers //seconds
Emilian Peev30522a12017-08-03 14:36:33 +0100101#define MISSING_REQUEST_BUF_TIMEOUT 5
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800102#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700103#define FLUSH_TIMEOUT 3
104#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
105
106#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
107 CAM_QCOM_FEATURE_CROP |\
108 CAM_QCOM_FEATURE_ROTATION |\
109 CAM_QCOM_FEATURE_SHARPNESS |\
110 CAM_QCOM_FEATURE_SCALE |\
111 CAM_QCOM_FEATURE_CAC |\
112 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700113/* Per configuration size for static metadata length*/
114#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700115
116#define TIMEOUT_NEVER -1
117
Jason Lee8ce36fa2017-04-19 19:40:37 -0700118/* Face rect indices */
119#define FACE_LEFT 0
120#define FACE_TOP 1
121#define FACE_RIGHT 2
122#define FACE_BOTTOM 3
123#define FACE_WEIGHT 4
124
Thierry Strudel04e026f2016-10-10 11:27:36 -0700125/* Face landmarks indices */
126#define LEFT_EYE_X 0
127#define LEFT_EYE_Y 1
128#define RIGHT_EYE_X 2
129#define RIGHT_EYE_Y 3
130#define MOUTH_X 4
131#define MOUTH_Y 5
132#define TOTAL_LANDMARK_INDICES 6
133
Zhijun He2a5df222017-04-04 18:20:38 -0700134// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700135#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700136
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700137// Whether to check for the GPU stride padding, or use the default
138//#define CHECK_GPU_PIXEL_ALIGNMENT
139
Thierry Strudel3d639192016-09-09 11:52:26 -0700140cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
141const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
142extern pthread_mutex_t gCamLock;
143volatile uint32_t gCamHal3LogLevel = 1;
144extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700145
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800146// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700147// The following Easel related variables must be protected by gHdrPlusClientLock.
Chien-Yu Chend77a5462017-06-02 18:00:38 -0700148std::unique_ptr<EaselManagerClient> gEaselManagerClient;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700149bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
150std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
151bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700152std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700153bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700154bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700155
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800156// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
157bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700158
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700159std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700160
Thierry Strudel3d639192016-09-09 11:52:26 -0700161
162const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
163 {"On", CAM_CDS_MODE_ON},
164 {"Off", CAM_CDS_MODE_OFF},
165 {"Auto",CAM_CDS_MODE_AUTO}
166};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700167const QCamera3HardwareInterface::QCameraMap<
168 camera_metadata_enum_android_video_hdr_mode_t,
169 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
170 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
171 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
172};
173
Thierry Strudel54dc9782017-02-15 12:12:10 -0800174const QCamera3HardwareInterface::QCameraMap<
175 camera_metadata_enum_android_binning_correction_mode_t,
176 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
177 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
178 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
179};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700180
181const QCamera3HardwareInterface::QCameraMap<
182 camera_metadata_enum_android_ir_mode_t,
183 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
184 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
185 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
186 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
187};
Thierry Strudel3d639192016-09-09 11:52:26 -0700188
189const QCamera3HardwareInterface::QCameraMap<
190 camera_metadata_enum_android_control_effect_mode_t,
191 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
192 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
193 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
194 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
195 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
196 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
197 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
198 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
199 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
200 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
201};
202
203const QCamera3HardwareInterface::QCameraMap<
204 camera_metadata_enum_android_control_awb_mode_t,
205 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
206 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
207 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
208 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
209 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
210 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
211 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
212 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
213 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
214 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
215};
216
217const QCamera3HardwareInterface::QCameraMap<
218 camera_metadata_enum_android_control_scene_mode_t,
219 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
220 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
221 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
222 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
223 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
224 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
225 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
226 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
227 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
228 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
229 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
230 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
231 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
232 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
233 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
234 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800235 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
236 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700237};
238
239const QCamera3HardwareInterface::QCameraMap<
240 camera_metadata_enum_android_control_af_mode_t,
241 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
242 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
243 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
244 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
245 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
246 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
247 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
248 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
249};
250
251const QCamera3HardwareInterface::QCameraMap<
252 camera_metadata_enum_android_color_correction_aberration_mode_t,
253 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
254 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
255 CAM_COLOR_CORRECTION_ABERRATION_OFF },
256 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
257 CAM_COLOR_CORRECTION_ABERRATION_FAST },
258 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
259 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
260};
261
262const QCamera3HardwareInterface::QCameraMap<
263 camera_metadata_enum_android_control_ae_antibanding_mode_t,
264 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
265 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
266 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
267 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
268 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
269};
270
271const QCamera3HardwareInterface::QCameraMap<
272 camera_metadata_enum_android_control_ae_mode_t,
273 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
274 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
275 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
276 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
277 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
Shuzhen Wang3d11a642017-08-18 09:57:29 -0700278 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
279 { (camera_metadata_enum_android_control_ae_mode_t)
280 NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
Thierry Strudel3d639192016-09-09 11:52:26 -0700281};
282
283const QCamera3HardwareInterface::QCameraMap<
284 camera_metadata_enum_android_flash_mode_t,
285 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
286 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
287 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
288 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
289};
290
291const QCamera3HardwareInterface::QCameraMap<
292 camera_metadata_enum_android_statistics_face_detect_mode_t,
293 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
294 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
295 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
296 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
297};
298
299const QCamera3HardwareInterface::QCameraMap<
300 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
301 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
302 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
303 CAM_FOCUS_UNCALIBRATED },
304 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
305 CAM_FOCUS_APPROXIMATE },
306 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
307 CAM_FOCUS_CALIBRATED }
308};
309
310const QCamera3HardwareInterface::QCameraMap<
311 camera_metadata_enum_android_lens_state_t,
312 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
313 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
314 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
315};
316
317const int32_t available_thumbnail_sizes[] = {0, 0,
318 176, 144,
319 240, 144,
320 256, 144,
321 240, 160,
322 256, 154,
323 240, 240,
324 320, 240};
325
326const QCamera3HardwareInterface::QCameraMap<
327 camera_metadata_enum_android_sensor_test_pattern_mode_t,
328 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
329 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
330 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
331 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
332 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
333 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
334 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
335};
336
337/* Since there is no mapping for all the options some Android enum are not listed.
338 * Also, the order in this list is important because while mapping from HAL to Android it will
339 * traverse from lower to higher index which means that for HAL values that are map to different
340 * Android values, the traverse logic will select the first one found.
341 */
342const QCamera3HardwareInterface::QCameraMap<
343 camera_metadata_enum_android_sensor_reference_illuminant1_t,
344 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
354 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
355 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
356 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
357 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
358 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
359 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
360 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
361};
362
363const QCamera3HardwareInterface::QCameraMap<
364 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
365 { 60, CAM_HFR_MODE_60FPS},
366 { 90, CAM_HFR_MODE_90FPS},
367 { 120, CAM_HFR_MODE_120FPS},
368 { 150, CAM_HFR_MODE_150FPS},
369 { 180, CAM_HFR_MODE_180FPS},
370 { 210, CAM_HFR_MODE_210FPS},
371 { 240, CAM_HFR_MODE_240FPS},
372 { 480, CAM_HFR_MODE_480FPS},
373};
374
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700375const QCamera3HardwareInterface::QCameraMap<
376 qcamera3_ext_instant_aec_mode_t,
377 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
378 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
379 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
380 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
381};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800382
383const QCamera3HardwareInterface::QCameraMap<
384 qcamera3_ext_exposure_meter_mode_t,
385 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
386 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
387 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
388 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
389 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
390 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
391 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
392 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
393};
394
395const QCamera3HardwareInterface::QCameraMap<
396 qcamera3_ext_iso_mode_t,
397 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
398 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
399 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
400 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
401 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
402 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
403 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
404 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
405 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
406};
407
Thierry Strudel3d639192016-09-09 11:52:26 -0700408camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
409 .initialize = QCamera3HardwareInterface::initialize,
410 .configure_streams = QCamera3HardwareInterface::configure_streams,
411 .register_stream_buffers = NULL,
412 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
413 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
414 .get_metadata_vendor_tag_ops = NULL,
415 .dump = QCamera3HardwareInterface::dump,
416 .flush = QCamera3HardwareInterface::flush,
417 .reserved = {0},
418};
419
420// initialise to some default value
421uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
422
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700423static inline void logEaselEvent(const char *tag, const char *event) {
424 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
425 struct timespec ts = {};
426 static int64_t kMsPerSec = 1000;
427 static int64_t kNsPerMs = 1000000;
428 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
429 if (res != OK) {
430 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
431 } else {
432 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
433 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
434 }
435 }
436}
437
Thierry Strudel3d639192016-09-09 11:52:26 -0700438/*===========================================================================
439 * FUNCTION : QCamera3HardwareInterface
440 *
441 * DESCRIPTION: constructor of QCamera3HardwareInterface
442 *
443 * PARAMETERS :
444 * @cameraId : camera ID
445 *
446 * RETURN : none
447 *==========================================================================*/
448QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
449 const camera_module_callbacks_t *callbacks)
450 : mCameraId(cameraId),
451 mCameraHandle(NULL),
452 mCameraInitialized(false),
453 mCallbackOps(NULL),
454 mMetadataChannel(NULL),
455 mPictureChannel(NULL),
456 mRawChannel(NULL),
457 mSupportChannel(NULL),
458 mAnalysisChannel(NULL),
459 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700460 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700461 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800462 mDepthChannel(NULL),
Emilian Peev656e4fa2017-06-02 16:47:04 +0100463 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800464 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700465 mChannelHandle(0),
466 mFirstConfiguration(true),
467 mFlush(false),
468 mFlushPerf(false),
469 mParamHeap(NULL),
470 mParameters(NULL),
471 mPrevParameters(NULL),
472 m_bIsVideo(false),
473 m_bIs4KVideo(false),
474 m_bEisSupportedSize(false),
475 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800476 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700477 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700478 mShutterDispatcher(this),
479 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700480 mMinProcessedFrameDuration(0),
481 mMinJpegFrameDuration(0),
482 mMinRawFrameDuration(0),
Emilian Peev30522a12017-08-03 14:36:33 +0100483 mExpectedFrameDuration(0),
484 mExpectedInflightDuration(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700485 mMetaFrameCount(0U),
486 mUpdateDebugLevel(false),
487 mCallbacks(callbacks),
488 mCaptureIntent(0),
489 mCacMode(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800490 /* DevCamDebug metadata internal m control*/
491 mDevCamDebugMetaEnable(0),
492 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700493 mBatchSize(0),
494 mToBeQueuedVidBufs(0),
495 mHFRVideoFps(DEFAULT_VIDEO_FPS),
496 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800497 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800498 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700499 mFirstFrameNumberInBatch(0),
500 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800501 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700502 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
503 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000504 mPDSupported(false),
505 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700506 mInstantAEC(false),
507 mResetInstantAEC(false),
508 mInstantAECSettledFrameNumber(0),
509 mAecSkipDisplayFrameBound(0),
510 mInstantAecFrameIdxCount(0),
Chien-Yu Chenbc730232017-07-12 14:49:55 -0700511 mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800512 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700513 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700514 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700515 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700516 mState(CLOSED),
517 mIsDeviceLinked(false),
518 mIsMainCamera(true),
519 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700520 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800521 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800522 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700523 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800524 mIsApInputUsedForHdrPlus(false),
525 mFirstPreviewIntentSeen(false),
Shuzhen Wang181c57b2017-07-21 11:39:44 -0700526 m_bSensorHDREnabled(false),
527 mAfTrigger()
Thierry Strudel3d639192016-09-09 11:52:26 -0700528{
529 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700530 mCommon.init(gCamCapability[cameraId]);
531 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700532#ifndef USE_HAL_3_3
533 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
534#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700535 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700536#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700537 mCameraDevice.common.close = close_camera_device;
538 mCameraDevice.ops = &mCameraOps;
539 mCameraDevice.priv = this;
540 gCamCapability[cameraId]->version = CAM_HAL_V3;
541 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
542 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
543 gCamCapability[cameraId]->min_num_pp_bufs = 3;
544
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800545 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700546
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800547 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700548 mPendingLiveRequest = 0;
549 mCurrentRequestId = -1;
550 pthread_mutex_init(&mMutex, NULL);
551
552 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
553 mDefaultMetadata[i] = NULL;
554
555 // Getting system props of different kinds
556 char prop[PROPERTY_VALUE_MAX];
557 memset(prop, 0, sizeof(prop));
558 property_get("persist.camera.raw.dump", prop, "0");
559 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800560 property_get("persist.camera.hal3.force.hdr", prop, "0");
561 mForceHdrSnapshot = atoi(prop);
562
Thierry Strudel3d639192016-09-09 11:52:26 -0700563 if (mEnableRawDump)
564 LOGD("Raw dump from Camera HAL enabled");
565
566 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
567 memset(mLdafCalib, 0, sizeof(mLdafCalib));
568
Arnd Geis082a4d72017-08-24 10:33:07 -0700569 memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
570
Thierry Strudel3d639192016-09-09 11:52:26 -0700571 memset(prop, 0, sizeof(prop));
572 property_get("persist.camera.tnr.preview", prop, "0");
573 m_bTnrPreview = (uint8_t)atoi(prop);
574
575 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800576 property_get("persist.camera.swtnr.preview", prop, "1");
577 m_bSwTnrPreview = (uint8_t)atoi(prop);
578
579 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700580 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700581 m_bTnrVideo = (uint8_t)atoi(prop);
582
583 memset(prop, 0, sizeof(prop));
584 property_get("persist.camera.avtimer.debug", prop, "0");
585 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800586 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700587
Thierry Strudel54dc9782017-02-15 12:12:10 -0800588 memset(prop, 0, sizeof(prop));
589 property_get("persist.camera.cacmode.disable", prop, "0");
590 m_cacModeDisabled = (uint8_t)atoi(prop);
591
Shuzhen Wangb57ec912017-07-31 13:24:27 -0700592 m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
Shuzhen Wang8c276ef2017-08-09 11:12:20 -0700593 m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);
Shuzhen Wangb57ec912017-07-31 13:24:27 -0700594
Thierry Strudel3d639192016-09-09 11:52:26 -0700595 //Load and read GPU library.
596 lib_surface_utils = NULL;
597 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700598 mSurfaceStridePadding = CAM_PAD_TO_64;
599#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700600 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
601 if (lib_surface_utils) {
602 *(void **)&LINK_get_surface_pixel_alignment =
603 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
604 if (LINK_get_surface_pixel_alignment) {
605 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
606 }
607 dlclose(lib_surface_utils);
608 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700609#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000610 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
611 mPDSupported = (0 <= mPDIndex) ? true : false;
612
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700613 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700614}
615
616/*===========================================================================
617 * FUNCTION : ~QCamera3HardwareInterface
618 *
619 * DESCRIPTION: destructor of QCamera3HardwareInterface
620 *
621 * PARAMETERS : none
622 *
623 * RETURN : none
624 *==========================================================================*/
625QCamera3HardwareInterface::~QCamera3HardwareInterface()
626{
627 LOGD("E");
628
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800629 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700630
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800631 // Disable power hint and enable the perf lock for close camera
632 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
633 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
634
635 // unlink of dualcam during close camera
636 if (mIsDeviceLinked) {
637 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
638 &m_pDualCamCmdPtr->bundle_info;
639 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
640 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
641 pthread_mutex_lock(&gCamLock);
642
643 if (mIsMainCamera == 1) {
644 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
645 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
646 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
647 // related session id should be session id of linked session
648 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
649 } else {
650 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
651 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
652 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
653 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
654 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800655 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800656 pthread_mutex_unlock(&gCamLock);
657
658 rc = mCameraHandle->ops->set_dual_cam_cmd(
659 mCameraHandle->camera_handle);
660 if (rc < 0) {
661 LOGE("Dualcam: Unlink failed, but still proceed to close");
662 }
663 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700664
665 /* We need to stop all streams before deleting any stream */
666 if (mRawDumpChannel) {
667 mRawDumpChannel->stop();
668 }
669
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700670 if (mHdrPlusRawSrcChannel) {
671 mHdrPlusRawSrcChannel->stop();
672 }
673
Thierry Strudel3d639192016-09-09 11:52:26 -0700674 // NOTE: 'camera3_stream_t *' objects are already freed at
675 // this stage by the framework
676 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
677 it != mStreamInfo.end(); it++) {
678 QCamera3ProcessingChannel *channel = (*it)->channel;
679 if (channel) {
680 channel->stop();
681 }
682 }
683 if (mSupportChannel)
684 mSupportChannel->stop();
685
686 if (mAnalysisChannel) {
687 mAnalysisChannel->stop();
688 }
689 if (mMetadataChannel) {
690 mMetadataChannel->stop();
691 }
692 if (mChannelHandle) {
693 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -0700694 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -0700695 LOGD("stopping channel %d", mChannelHandle);
696 }
697
698 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
699 it != mStreamInfo.end(); it++) {
700 QCamera3ProcessingChannel *channel = (*it)->channel;
701 if (channel)
702 delete channel;
703 free (*it);
704 }
705 if (mSupportChannel) {
706 delete mSupportChannel;
707 mSupportChannel = NULL;
708 }
709
710 if (mAnalysisChannel) {
711 delete mAnalysisChannel;
712 mAnalysisChannel = NULL;
713 }
714 if (mRawDumpChannel) {
715 delete mRawDumpChannel;
716 mRawDumpChannel = NULL;
717 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700718 if (mHdrPlusRawSrcChannel) {
719 delete mHdrPlusRawSrcChannel;
720 mHdrPlusRawSrcChannel = NULL;
721 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700722 if (mDummyBatchChannel) {
723 delete mDummyBatchChannel;
724 mDummyBatchChannel = NULL;
725 }
726
727 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800728 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700729
730 if (mMetadataChannel) {
731 delete mMetadataChannel;
732 mMetadataChannel = NULL;
733 }
734
735 /* Clean up all channels */
736 if (mCameraInitialized) {
737 if(!mFirstConfiguration){
738 //send the last unconfigure
739 cam_stream_size_info_t stream_config_info;
740 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
741 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
742 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800743 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -0700744 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700745 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700746 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
747 stream_config_info);
748 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
749 if (rc < 0) {
750 LOGE("set_parms failed for unconfigure");
751 }
752 }
753 deinitParameters();
754 }
755
756 if (mChannelHandle) {
757 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
758 mChannelHandle);
759 LOGH("deleting channel %d", mChannelHandle);
760 mChannelHandle = 0;
761 }
762
763 if (mState != CLOSED)
764 closeCamera();
765
766 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
767 req.mPendingBufferList.clear();
768 }
769 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700770 for (pendingRequestIterator i = mPendingRequestsList.begin();
771 i != mPendingRequestsList.end();) {
772 i = erasePendingRequest(i);
773 }
774 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
775 if (mDefaultMetadata[i])
776 free_camera_metadata(mDefaultMetadata[i]);
777
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800778 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700779
780 pthread_cond_destroy(&mRequestCond);
781
782 pthread_cond_destroy(&mBuffersCond);
783
784 pthread_mutex_destroy(&mMutex);
785 LOGD("X");
786}
787
788/*===========================================================================
789 * FUNCTION : erasePendingRequest
790 *
791 * DESCRIPTION: function to erase a desired pending request after freeing any
792 * allocated memory
793 *
794 * PARAMETERS :
795 * @i : iterator pointing to pending request to be erased
796 *
797 * RETURN : iterator pointing to the next request
798 *==========================================================================*/
799QCamera3HardwareInterface::pendingRequestIterator
800 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
801{
802 if (i->input_buffer != NULL) {
803 free(i->input_buffer);
804 i->input_buffer = NULL;
805 }
806 if (i->settings != NULL)
807 free_camera_metadata((camera_metadata_t*)i->settings);
Emilian Peev30522a12017-08-03 14:36:33 +0100808
809 mExpectedInflightDuration -= i->expectedFrameDuration;
810 if (mExpectedInflightDuration < 0) {
811 LOGE("Negative expected in-flight duration!");
812 mExpectedInflightDuration = 0;
813 }
814
Thierry Strudel3d639192016-09-09 11:52:26 -0700815 return mPendingRequestsList.erase(i);
816}
817
818/*===========================================================================
819 * FUNCTION : camEvtHandle
820 *
821 * DESCRIPTION: Function registered to mm-camera-interface to handle events
822 *
823 * PARAMETERS :
824 * @camera_handle : interface layer camera handle
825 * @evt : ptr to event
826 * @user_data : user data ptr
827 *
828 * RETURN : none
829 *==========================================================================*/
830void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
831 mm_camera_event_t *evt,
832 void *user_data)
833{
834 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
835 if (obj && evt) {
836 switch(evt->server_event_type) {
837 case CAM_EVENT_TYPE_DAEMON_DIED:
838 pthread_mutex_lock(&obj->mMutex);
839 obj->mState = ERROR;
840 pthread_mutex_unlock(&obj->mMutex);
841 LOGE("Fatal, camera daemon died");
842 break;
843
844 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
845 LOGD("HAL got request pull from Daemon");
846 pthread_mutex_lock(&obj->mMutex);
847 obj->mWokenUpByDaemon = true;
848 obj->unblockRequestIfNecessary();
849 pthread_mutex_unlock(&obj->mMutex);
850 break;
851
852 default:
853 LOGW("Warning: Unhandled event %d",
854 evt->server_event_type);
855 break;
856 }
857 } else {
858 LOGE("NULL user_data/evt");
859 }
860}
861
862/*===========================================================================
863 * FUNCTION : openCamera
864 *
865 * DESCRIPTION: open camera
866 *
867 * PARAMETERS :
868 * @hw_device : double ptr for camera device struct
869 *
870 * RETURN : int32_t type of status
871 * NO_ERROR -- success
872 * none-zero failure code
873 *==========================================================================*/
874int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
875{
876 int rc = 0;
877 if (mState != CLOSED) {
878 *hw_device = NULL;
879 return PERMISSION_DENIED;
880 }
881
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700882 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800883 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700884 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
885 mCameraId);
886
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700887 if (mCameraHandle) {
888 LOGE("Failure: Camera already opened");
889 return ALREADY_EXISTS;
890 }
891
892 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700893 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -0700894 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700895 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -0700896 rc = gEaselManagerClient->resume(this);
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700897 if (rc != 0) {
898 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
899 return rc;
900 }
901 }
902 }
903
Thierry Strudel3d639192016-09-09 11:52:26 -0700904 rc = openCamera();
905 if (rc == 0) {
906 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800907 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700908 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700909
910 // Suspend Easel because opening camera failed.
911 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -0700912 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -0700913 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
914 status_t suspendErr = gEaselManagerClient->suspend();
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700915 if (suspendErr != 0) {
916 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
917 strerror(-suspendErr), suspendErr);
918 }
919 }
920 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800921 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700922
Thierry Strudel3d639192016-09-09 11:52:26 -0700923 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
924 mCameraId, rc);
925
926 if (rc == NO_ERROR) {
927 mState = OPENED;
928 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800929
Thierry Strudel3d639192016-09-09 11:52:26 -0700930 return rc;
931}
932
933/*===========================================================================
934 * FUNCTION : openCamera
935 *
936 * DESCRIPTION: open camera
937 *
938 * PARAMETERS : none
939 *
940 * RETURN : int32_t type of status
941 * NO_ERROR -- success
942 * none-zero failure code
943 *==========================================================================*/
944int QCamera3HardwareInterface::openCamera()
945{
946 int rc = 0;
947 char value[PROPERTY_VALUE_MAX];
948
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800949 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800950
Thierry Strudel3d639192016-09-09 11:52:26 -0700951 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
952 if (rc < 0) {
953 LOGE("Failed to reserve flash for camera id: %d",
954 mCameraId);
955 return UNKNOWN_ERROR;
956 }
957
958 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
959 if (rc) {
960 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
961 return rc;
962 }
963
964 if (!mCameraHandle) {
965 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
966 return -ENODEV;
967 }
968
969 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
970 camEvtHandle, (void *)this);
971
972 if (rc < 0) {
973 LOGE("Error, failed to register event callback");
974 /* Not closing camera here since it is already handled in destructor */
975 return FAILED_TRANSACTION;
976 }
977
978 mExifParams.debug_params =
979 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
980 if (mExifParams.debug_params) {
981 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
982 } else {
983 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
984 return NO_MEMORY;
985 }
986 mFirstConfiguration = true;
987
988 //Notify display HAL that a camera session is active.
989 //But avoid calling the same during bootup because camera service might open/close
990 //cameras at boot time during its initialization and display service will also internally
991 //wait for camera service to initialize first while calling this display API, resulting in a
992 //deadlock situation. Since boot time camera open/close calls are made only to fetch
993 //capabilities, no need of this display bw optimization.
994 //Use "service.bootanim.exit" property to know boot status.
995 property_get("service.bootanim.exit", value, "0");
996 if (atoi(value) == 1) {
997 pthread_mutex_lock(&gCamLock);
998 if (gNumCameraSessions++ == 0) {
999 setCameraLaunchStatus(true);
1000 }
1001 pthread_mutex_unlock(&gCamLock);
1002 }
1003
1004 //fill the session id needed while linking dual cam
1005 pthread_mutex_lock(&gCamLock);
1006 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1007 &sessionId[mCameraId]);
1008 pthread_mutex_unlock(&gCamLock);
1009
1010 if (rc < 0) {
1011 LOGE("Error, failed to get sessiion id");
1012 return UNKNOWN_ERROR;
1013 } else {
1014 //Allocate related cam sync buffer
1015 //this is needed for the payload that goes along with bundling cmd for related
1016 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001017 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1018 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001019 if(rc != OK) {
1020 rc = NO_MEMORY;
1021 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1022 return NO_MEMORY;
1023 }
1024
1025 //Map memory for related cam sync buffer
1026 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001027 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1028 m_pDualCamCmdHeap->getFd(0),
1029 sizeof(cam_dual_camera_cmd_info_t),
1030 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001031 if(rc < 0) {
1032 LOGE("Dualcam: failed to map Related cam sync buffer");
1033 rc = FAILED_TRANSACTION;
1034 return NO_MEMORY;
1035 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001036 m_pDualCamCmdPtr =
1037 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001038 }
1039
1040 LOGH("mCameraId=%d",mCameraId);
1041
1042 return NO_ERROR;
1043}
1044
1045/*===========================================================================
1046 * FUNCTION : closeCamera
1047 *
1048 * DESCRIPTION: close camera
1049 *
1050 * PARAMETERS : none
1051 *
1052 * RETURN : int32_t type of status
1053 * NO_ERROR -- success
1054 * none-zero failure code
1055 *==========================================================================*/
1056int QCamera3HardwareInterface::closeCamera()
1057{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001058 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001059 int rc = NO_ERROR;
1060 char value[PROPERTY_VALUE_MAX];
1061
1062 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1063 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001064
1065 // unmap memory for related cam sync buffer
1066 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001067 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001068 if (NULL != m_pDualCamCmdHeap) {
1069 m_pDualCamCmdHeap->deallocate();
1070 delete m_pDualCamCmdHeap;
1071 m_pDualCamCmdHeap = NULL;
1072 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001073 }
1074
Thierry Strudel3d639192016-09-09 11:52:26 -07001075 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1076 mCameraHandle = NULL;
1077
1078 //reset session id to some invalid id
1079 pthread_mutex_lock(&gCamLock);
1080 sessionId[mCameraId] = 0xDEADBEEF;
1081 pthread_mutex_unlock(&gCamLock);
1082
1083 //Notify display HAL that there is no active camera session
1084 //but avoid calling the same during bootup. Refer to openCamera
1085 //for more details.
1086 property_get("service.bootanim.exit", value, "0");
1087 if (atoi(value) == 1) {
1088 pthread_mutex_lock(&gCamLock);
1089 if (--gNumCameraSessions == 0) {
1090 setCameraLaunchStatus(false);
1091 }
1092 pthread_mutex_unlock(&gCamLock);
1093 }
1094
Thierry Strudel3d639192016-09-09 11:52:26 -07001095 if (mExifParams.debug_params) {
1096 free(mExifParams.debug_params);
1097 mExifParams.debug_params = NULL;
1098 }
1099 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1100 LOGW("Failed to release flash for camera id: %d",
1101 mCameraId);
1102 }
1103 mState = CLOSED;
1104 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1105 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001106
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001107 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07001108 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1109 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001110 if (gHdrPlusClient != nullptr) {
1111 // Disable HDR+ mode.
1112 disableHdrPlusModeLocked();
1113 // Disconnect Easel if it's connected.
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001114 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001115 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001116 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001117
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001118 if (EaselManagerClientOpened) {
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001119 rc = gEaselManagerClient->stopMipi(mCameraId);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001120 if (rc != 0) {
1121 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1122 }
1123
Chien-Yu Chend77a5462017-06-02 18:00:38 -07001124 rc = gEaselManagerClient->suspend();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001125 if (rc != 0) {
1126 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1127 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001128 }
1129 }
1130
Thierry Strudel3d639192016-09-09 11:52:26 -07001131 return rc;
1132}
1133
1134/*===========================================================================
1135 * FUNCTION : initialize
1136 *
1137 * DESCRIPTION: Initialize frameworks callback functions
1138 *
1139 * PARAMETERS :
1140 * @callback_ops : callback function to frameworks
1141 *
1142 * RETURN :
1143 *
1144 *==========================================================================*/
1145int QCamera3HardwareInterface::initialize(
1146 const struct camera3_callback_ops *callback_ops)
1147{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001148 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001149 int rc;
1150
1151 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1152 pthread_mutex_lock(&mMutex);
1153
1154 // Validate current state
1155 switch (mState) {
1156 case OPENED:
1157 /* valid state */
1158 break;
1159 default:
1160 LOGE("Invalid state %d", mState);
1161 rc = -ENODEV;
1162 goto err1;
1163 }
1164
1165 rc = initParameters();
1166 if (rc < 0) {
1167 LOGE("initParamters failed %d", rc);
1168 goto err1;
1169 }
1170 mCallbackOps = callback_ops;
1171
1172 mChannelHandle = mCameraHandle->ops->add_channel(
1173 mCameraHandle->camera_handle, NULL, NULL, this);
1174 if (mChannelHandle == 0) {
1175 LOGE("add_channel failed");
1176 rc = -ENOMEM;
1177 pthread_mutex_unlock(&mMutex);
1178 return rc;
1179 }
1180
1181 pthread_mutex_unlock(&mMutex);
1182 mCameraInitialized = true;
1183 mState = INITIALIZED;
1184 LOGI("X");
1185 return 0;
1186
1187err1:
1188 pthread_mutex_unlock(&mMutex);
1189 return rc;
1190}
1191
1192/*===========================================================================
1193 * FUNCTION : validateStreamDimensions
1194 *
1195 * DESCRIPTION: Check if the configuration requested are those advertised
1196 *
1197 * PARAMETERS :
1198 * @stream_list : streams to be configured
1199 *
1200 * RETURN :
1201 *
1202 *==========================================================================*/
1203int QCamera3HardwareInterface::validateStreamDimensions(
1204 camera3_stream_configuration_t *streamList)
1205{
1206 int rc = NO_ERROR;
1207 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001208 uint32_t depthWidth = 0;
1209 uint32_t depthHeight = 0;
1210 if (mPDSupported) {
1211 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1212 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001214
1215 camera3_stream_t *inputStream = NULL;
1216 /*
1217 * Loop through all streams to find input stream if it exists*
1218 */
1219 for (size_t i = 0; i< streamList->num_streams; i++) {
1220 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1221 if (inputStream != NULL) {
1222 LOGE("Error, Multiple input streams requested");
1223 return -EINVAL;
1224 }
1225 inputStream = streamList->streams[i];
1226 }
1227 }
1228 /*
1229 * Loop through all streams requested in configuration
1230 * Check if unsupported sizes have been requested on any of them
1231 */
1232 for (size_t j = 0; j < streamList->num_streams; j++) {
1233 bool sizeFound = false;
1234 camera3_stream_t *newStream = streamList->streams[j];
1235
1236 uint32_t rotatedHeight = newStream->height;
1237 uint32_t rotatedWidth = newStream->width;
1238 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1239 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1240 rotatedHeight = newStream->width;
1241 rotatedWidth = newStream->height;
1242 }
1243
1244 /*
1245 * Sizes are different for each type of stream format check against
1246 * appropriate table.
1247 */
1248 switch (newStream->format) {
1249 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1250 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1251 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001252 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1253 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1254 mPDSupported) {
1255 if ((depthWidth == newStream->width) &&
1256 (depthHeight == newStream->height)) {
1257 sizeFound = true;
1258 }
1259 break;
1260 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001261 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1262 for (size_t i = 0; i < count; i++) {
1263 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1264 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1265 sizeFound = true;
1266 break;
1267 }
1268 }
1269 break;
1270 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001271 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1272 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001273 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001274 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001275 if ((depthSamplesCount == newStream->width) &&
1276 (1 == newStream->height)) {
1277 sizeFound = true;
1278 }
1279 break;
1280 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001281 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1282 /* Verify set size against generated sizes table */
1283 for (size_t i = 0; i < count; i++) {
1284 if (((int32_t)rotatedWidth ==
1285 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1286 ((int32_t)rotatedHeight ==
1287 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1288 sizeFound = true;
1289 break;
1290 }
1291 }
1292 break;
1293 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1294 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1295 default:
1296 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1297 || newStream->stream_type == CAMERA3_STREAM_INPUT
1298 || IS_USAGE_ZSL(newStream->usage)) {
1299 if (((int32_t)rotatedWidth ==
1300 gCamCapability[mCameraId]->active_array_size.width) &&
1301 ((int32_t)rotatedHeight ==
1302 gCamCapability[mCameraId]->active_array_size.height)) {
1303 sizeFound = true;
1304 break;
1305 }
1306                     /* We could potentially break here to enforce that a ZSL stream
1307                      * set from the framework is always full active array size, but it
1308                      * is not clear from the spec whether the framework will always
1309                      * follow that. We also have logic to override to full array
1310                      * size, so keep the check lenient for now.
1311 */
1312 }
1313 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1314 MAX_SIZES_CNT);
1315 for (size_t i = 0; i < count; i++) {
1316 if (((int32_t)rotatedWidth ==
1317 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1318 ((int32_t)rotatedHeight ==
1319 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1320 sizeFound = true;
1321 break;
1322 }
1323 }
1324 break;
1325 } /* End of switch(newStream->format) */
1326
1327 /* We error out even if a single stream has unsupported size set */
1328 if (!sizeFound) {
1329 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1330 rotatedWidth, rotatedHeight, newStream->format,
1331 gCamCapability[mCameraId]->active_array_size.width,
1332 gCamCapability[mCameraId]->active_array_size.height);
1333 rc = -EINVAL;
1334 break;
1335 }
1336 } /* End of for each stream */
1337 return rc;
1338}
1339
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001340/*===========================================================================
1341 * FUNCTION : validateUsageFlags
1342 *
1343 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1344 *
1345 * PARAMETERS :
1346 * @stream_list : streams to be configured
1347 *
1348 * RETURN :
1349 * NO_ERROR if the usage flags are supported
1350 * error code if usage flags are not supported
1351 *
1352 *==========================================================================*/
1353int QCamera3HardwareInterface::validateUsageFlags(
1354 const camera3_stream_configuration_t* streamList)
1355{
1356 for (size_t j = 0; j < streamList->num_streams; j++) {
1357 const camera3_stream_t *newStream = streamList->streams[j];
1358
1359 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1360 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1361 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1362 continue;
1363 }
1364
Jason Leec4cf5032017-05-24 18:31:41 -07001365 // Here we only care whether it's EIS3 or not
1366 char is_type_value[PROPERTY_VALUE_MAX];
1367 property_get("persist.camera.is_type", is_type_value, "4");
1368 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1369 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1370 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1371 isType = IS_TYPE_NONE;
1372
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001373 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1374 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1375 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1376 bool forcePreviewUBWC = true;
1377 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1378 forcePreviewUBWC = false;
1379 }
1380 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001381 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001382 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001383 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001384 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001385 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001386
1387 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1388 // So color spaces will always match.
1389
1390 // Check whether underlying formats of shared streams match.
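        // Example (formats illustrative): a single surface flagged for both
        // video encoding and preview is only accepted if getStreamDefaultFormat()
        // resolves CAM_STREAM_TYPE_VIDEO and CAM_STREAM_TYPE_PREVIEW to the same
        // underlying format for this size; otherwise the combination is rejected.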
1391 if (isVideo && isPreview && videoFormat != previewFormat) {
1392 LOGE("Combined video and preview usage flag is not supported");
1393 return -EINVAL;
1394 }
1395 if (isPreview && isZSL && previewFormat != zslFormat) {
1396 LOGE("Combined preview and zsl usage flag is not supported");
1397 return -EINVAL;
1398 }
1399 if (isVideo && isZSL && videoFormat != zslFormat) {
1400 LOGE("Combined video and zsl usage flag is not supported");
1401 return -EINVAL;
1402 }
1403 }
1404 return NO_ERROR;
1405}
1406
1407/*===========================================================================
1408 * FUNCTION : validateUsageFlagsForEis
1409 *
1410 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1411 *
1412 * PARAMETERS :
1413 * @stream_list : streams to be configured
1414 *
1415 * RETURN :
1416 * NO_ERROR if the usage flags are supported
1417 * error code if usage flags are not supported
1418 *
1419 *==========================================================================*/
1420int QCamera3HardwareInterface::validateUsageFlagsForEis(
1421 const camera3_stream_configuration_t* streamList)
1422{
1423 for (size_t j = 0; j < streamList->num_streams; j++) {
1424 const camera3_stream_t *newStream = streamList->streams[j];
1425
1426 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1427 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1428
1429     // Because EIS is "hard-coded" for certain use cases, and the current
1430     // implementation doesn't support shared preview and video on the same
1431     // stream, return failure if EIS is forced on.
1432 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1433 LOGE("Combined video and preview usage flag is not supported due to EIS");
1434 return -EINVAL;
1435 }
1436 }
1437 return NO_ERROR;
1438}
1439
Thierry Strudel3d639192016-09-09 11:52:26 -07001440/*==============================================================================
1441 * FUNCTION : isSupportChannelNeeded
1442 *
1443 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1444 *
1445 * PARAMETERS :
1446 * @stream_list : streams to be configured
1447 * @stream_config_info : the config info for streams to be configured
1448 *
1449 * RETURN : Boolean true/false decision
1450 *
1451 *==========================================================================*/
1452bool QCamera3HardwareInterface::isSupportChannelNeeded(
1453 camera3_stream_configuration_t *streamList,
1454 cam_stream_size_info_t stream_config_info)
1455{
1456 uint32_t i;
1457 bool pprocRequested = false;
1458     /* Check for conditions where the PProc pipeline does not have any streams */
1459 for (i = 0; i < stream_config_info.num_streams; i++) {
1460 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1461 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1462 pprocRequested = true;
1463 break;
1464 }
1465 }
1466
1467 if (pprocRequested == false )
1468 return true;
1469
1470 /* Dummy stream needed if only raw or jpeg streams present */
1471 for (i = 0; i < streamList->num_streams; i++) {
1472 switch(streamList->streams[i]->format) {
1473 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1474 case HAL_PIXEL_FORMAT_RAW10:
1475 case HAL_PIXEL_FORMAT_RAW16:
1476 case HAL_PIXEL_FORMAT_BLOB:
1477 break;
1478 default:
1479 return false;
1480 }
1481 }
1482 return true;
1483}
1484
1485/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001488 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 *
1490 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * RETURN : int32_t type of status
1494 * NO_ERROR -- success
1495 *              non-zero failure code
1496 *
1497 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001498int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001499{
1500 int32_t rc = NO_ERROR;
1501
1502 cam_dimension_t max_dim = {0, 0};
1503 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1504 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1505 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1506 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1507 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1508 }
1509
1510 clear_metadata_buffer(mParameters);
1511
1512 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1513 max_dim);
1514 if (rc != NO_ERROR) {
1515 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1516 return rc;
1517 }
1518
1519 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1520 if (rc != NO_ERROR) {
1521 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1522 return rc;
1523 }
1524
1525 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001527
1528 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1529 mParameters);
1530 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001531 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001532 return rc;
1533 }
1534
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001535 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001536 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1537 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1538 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1539 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1540 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001541
1542 return rc;
1543}
1544
1545/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001546 * FUNCTION : getCurrentSensorModeInfo
1547 *
1548 * DESCRIPTION: Get sensor mode information that is currently selected.
1549 *
1550 * PARAMETERS :
1551 * @sensorModeInfo : sensor mode information (output)
1552 *
1553 * RETURN : int32_t type of status
1554 * NO_ERROR -- success
1555 *              non-zero failure code
1556 *
1557 *==========================================================================*/
1558int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1559{
1560 int32_t rc = NO_ERROR;
1561
1562 clear_metadata_buffer(mParameters);
1563 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1564
1565 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1566 mParameters);
1567 if (rc != NO_ERROR) {
1568         LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1569 return rc;
1570 }
1571
1572 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1573 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1574 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1575 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1576 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1577 sensorModeInfo.num_raw_bits);
1578
1579 return rc;
1580}
1581
1582/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001583 * FUNCTION : addToPPFeatureMask
1584 *
1585 * DESCRIPTION: add additional features to pp feature mask based on
1586 * stream type and usecase
1587 *
1588 * PARAMETERS :
1589 * @stream_format : stream type for feature mask
1590 * @stream_idx : stream idx within postprocess_mask list to change
1591 *
1592 * RETURN : NULL
1593 *
1594 *==========================================================================*/
1595void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1596 uint32_t stream_idx)
1597{
1598 char feature_mask_value[PROPERTY_VALUE_MAX];
1599 cam_feature_mask_t feature_mask;
1600 int args_converted;
1601 int property_len;
1602
1603 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001604#ifdef _LE_CAMERA_
1605 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1606 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1607 property_len = property_get("persist.camera.hal3.feature",
1608 feature_mask_value, swtnr_feature_mask_value);
1609#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001612#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1614 (feature_mask_value[1] == 'x')) {
1615 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1616 } else {
1617 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1618 }
1619 if (1 != args_converted) {
1620 feature_mask = 0;
1621 LOGE("Wrong feature mask %s", feature_mask_value);
1622 return;
1623 }
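    // The property accepts either hex or decimal, e.g. (hypothetical values)
    // "adb shell setprop persist.camera.hal3.feature 0x4000" or "16384";
    // the parsed bits are only consulted below for implementation-defined
    // (video/preview) streams.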
1624
1625 switch (stream_format) {
1626 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1627 /* Add LLVD to pp feature mask only if video hint is enabled */
1628 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1629 mStreamConfigInfo.postprocess_mask[stream_idx]
1630 |= CAM_QTI_FEATURE_SW_TNR;
1631 LOGH("Added SW TNR to pp feature mask");
1632 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1633 mStreamConfigInfo.postprocess_mask[stream_idx]
1634 |= CAM_QCOM_FEATURE_LLVD;
1635 LOGH("Added LLVD SeeMore to pp feature mask");
1636 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001637 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1638 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1639 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1640 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001641 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1642 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1643 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1644 CAM_QTI_FEATURE_BINNING_CORRECTION;
1645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001646 break;
1647 }
1648 default:
1649 break;
1650 }
1651 LOGD("PP feature mask %llx",
1652 mStreamConfigInfo.postprocess_mask[stream_idx]);
1653}
1654
1655/*==============================================================================
1656 * FUNCTION : updateFpsInPreviewBuffer
1657 *
1658 * DESCRIPTION: update FPS information in preview buffer.
1659 *
1660 * PARAMETERS :
1661 * @metadata : pointer to metadata buffer
1662 * @frame_number: frame_number to look for in pending buffer list
1663 *
1664 * RETURN : None
1665 *
1666 *==========================================================================*/
1667void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1668 uint32_t frame_number)
1669{
1670 // Mark all pending buffers for this particular request
1671 // with corresponding framerate information
1672 for (List<PendingBuffersInRequest>::iterator req =
1673 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1674 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1675 for(List<PendingBufferInfo>::iterator j =
1676 req->mPendingBufferList.begin();
1677 j != req->mPendingBufferList.end(); j++) {
1678 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1679 if ((req->frame_number == frame_number) &&
1680 (channel->getStreamTypeMask() &
1681 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1682 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1683 CAM_INTF_PARM_FPS_RANGE, metadata) {
1684 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1685 struct private_handle_t *priv_handle =
1686 (struct private_handle_t *)(*(j->buffer));
1687 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1688 }
1689 }
1690 }
1691 }
1692}
1693
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001694/*==============================================================================
1695 * FUNCTION : updateTimeStampInPendingBuffers
1696 *
1697 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1698 * of a frame number
1699 *
1700 * PARAMETERS :
1701 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1702 * @timestamp : timestamp to be set
1703 *
1704 * RETURN : None
1705 *
1706 *==========================================================================*/
1707void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1708 uint32_t frameNumber, nsecs_t timestamp)
1709{
1710 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1711 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1712 if (req->frame_number != frameNumber)
1713 continue;
1714
1715 for (auto k = req->mPendingBufferList.begin();
1716 k != req->mPendingBufferList.end(); k++ ) {
1717 struct private_handle_t *priv_handle =
1718 (struct private_handle_t *) (*(k->buffer));
1719 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1720 }
1721 }
1722 return;
1723}
1724
Thierry Strudel3d639192016-09-09 11:52:26 -07001725/*===========================================================================
1726 * FUNCTION : configureStreams
1727 *
1728 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1729 * and output streams.
1730 *
1731 * PARAMETERS :
1732 * @stream_list : streams to be configured
1733 *
1734 * RETURN :
1735 *
1736 *==========================================================================*/
1737int QCamera3HardwareInterface::configureStreams(
1738 camera3_stream_configuration_t *streamList)
1739{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001740 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001741 int rc = 0;
1742
1743 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001744 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001745 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001746 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001747
1748 return rc;
1749}
1750
1751/*===========================================================================
1752 * FUNCTION : configureStreamsPerfLocked
1753 *
1754 * DESCRIPTION: configureStreams while perfLock is held.
1755 *
1756 * PARAMETERS :
1757 * @stream_list : streams to be configured
1758 *
1759 * RETURN : int32_t type of status
1760 * NO_ERROR -- success
1761 * none-zero failure code
1762 *==========================================================================*/
1763int QCamera3HardwareInterface::configureStreamsPerfLocked(
1764 camera3_stream_configuration_t *streamList)
1765{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001766 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001767 int rc = 0;
1768
1769 // Sanity check stream_list
1770 if (streamList == NULL) {
1771 LOGE("NULL stream configuration");
1772 return BAD_VALUE;
1773 }
1774 if (streamList->streams == NULL) {
1775 LOGE("NULL stream list");
1776 return BAD_VALUE;
1777 }
1778
1779 if (streamList->num_streams < 1) {
1780 LOGE("Bad number of streams requested: %d",
1781 streamList->num_streams);
1782 return BAD_VALUE;
1783 }
1784
1785 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1786 LOGE("Maximum number of streams %d exceeded: %d",
1787 MAX_NUM_STREAMS, streamList->num_streams);
1788 return BAD_VALUE;
1789 }
1790
Jason Leec4cf5032017-05-24 18:31:41 -07001791 mOpMode = streamList->operation_mode;
1792 LOGD("mOpMode: %d", mOpMode);
1793
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001794 rc = validateUsageFlags(streamList);
1795 if (rc != NO_ERROR) {
1796 return rc;
1797 }
1798
Thierry Strudel3d639192016-09-09 11:52:26 -07001799     /* first invalidate all the streams in mStreamInfo;
1800      * if they appear again, they will be re-validated */
1801 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1802 it != mStreamInfo.end(); it++) {
1803 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1804 if (channel) {
1805 channel->stop();
1806 }
1807 (*it)->status = INVALID;
1808 }
1809
1810 if (mRawDumpChannel) {
1811 mRawDumpChannel->stop();
1812 delete mRawDumpChannel;
1813 mRawDumpChannel = NULL;
1814 }
1815
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001816 if (mHdrPlusRawSrcChannel) {
1817 mHdrPlusRawSrcChannel->stop();
1818 delete mHdrPlusRawSrcChannel;
1819 mHdrPlusRawSrcChannel = NULL;
1820 }
1821
Thierry Strudel3d639192016-09-09 11:52:26 -07001822 if (mSupportChannel)
1823 mSupportChannel->stop();
1824
1825 if (mAnalysisChannel) {
1826 mAnalysisChannel->stop();
1827 }
1828 if (mMetadataChannel) {
1829         /* If mStreamInfo is not empty, a metadata stream exists */
1830 mMetadataChannel->stop();
1831 }
1832 if (mChannelHandle) {
1833 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001834 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 LOGD("stopping channel %d", mChannelHandle);
1836 }
1837
1838 pthread_mutex_lock(&mMutex);
1839
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001840 mPictureChannel = NULL;
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 // Check state
1843 switch (mState) {
1844 case INITIALIZED:
1845 case CONFIGURED:
1846 case STARTED:
1847 /* valid state */
1848 break;
1849 default:
1850 LOGE("Invalid state %d", mState);
1851 pthread_mutex_unlock(&mMutex);
1852 return -ENODEV;
1853 }
1854
1855 /* Check whether we have video stream */
1856 m_bIs4KVideo = false;
1857 m_bIsVideo = false;
1858 m_bEisSupportedSize = false;
1859 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001860 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001862 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001863 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 uint32_t videoWidth = 0U;
1865 uint32_t videoHeight = 0U;
1866 size_t rawStreamCnt = 0;
1867 size_t stallStreamCnt = 0;
1868 size_t processedStreamCnt = 0;
1869 // Number of streams on ISP encoder path
1870 size_t numStreamsOnEncoder = 0;
1871 size_t numYuv888OnEncoder = 0;
1872 bool bYuv888OverrideJpeg = false;
1873 cam_dimension_t largeYuv888Size = {0, 0};
1874 cam_dimension_t maxViewfinderSize = {0, 0};
1875 bool bJpegExceeds4K = false;
1876 bool bJpegOnEncoder = false;
1877 bool bUseCommonFeatureMask = false;
1878 cam_feature_mask_t commonFeatureMask = 0;
1879 bool bSmallJpegSize = false;
1880 uint32_t width_ratio;
1881 uint32_t height_ratio;
1882 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1883 camera3_stream_t *inputStream = NULL;
1884 bool isJpeg = false;
1885 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001886 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001887 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001888
1889 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1890
1891 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001892 uint8_t eis_prop_set;
1893 uint32_t maxEisWidth = 0;
1894 uint32_t maxEisHeight = 0;
1895
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001896 // Initialize all instant AEC related variables
1897 mInstantAEC = false;
1898 mResetInstantAEC = false;
1899 mInstantAECSettledFrameNumber = 0;
1900 mAecSkipDisplayFrameBound = 0;
1901 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001902 mCurrFeatureState = 0;
1903 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001904
Thierry Strudel3d639192016-09-09 11:52:26 -07001905 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1906
1907 size_t count = IS_TYPE_MAX;
1908 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1909 for (size_t i = 0; i < count; i++) {
1910 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001911 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1912 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 break;
1914 }
1915 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001916
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001917 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 maxEisWidth = MAX_EIS_WIDTH;
1919 maxEisHeight = MAX_EIS_HEIGHT;
1920 }
1921
1922 /* EIS setprop control */
1923 char eis_prop[PROPERTY_VALUE_MAX];
1924 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001925 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 eis_prop_set = (uint8_t)atoi(eis_prop);
1927
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001928 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1930
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001931 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1932 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001933
Thierry Strudel3d639192016-09-09 11:52:26 -07001934 /* stream configurations */
1935 for (size_t i = 0; i < streamList->num_streams; i++) {
1936 camera3_stream_t *newStream = streamList->streams[i];
1937 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1938 "height = %d, rotation = %d, usage = 0x%x",
1939 i, newStream->stream_type, newStream->format,
1940 newStream->width, newStream->height, newStream->rotation,
1941 newStream->usage);
1942 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1943 newStream->stream_type == CAMERA3_STREAM_INPUT){
1944 isZsl = true;
1945 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001946 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1947 IS_USAGE_PREVIEW(newStream->usage)) {
1948 isPreview = true;
1949 }
1950
Thierry Strudel3d639192016-09-09 11:52:26 -07001951 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1952 inputStream = newStream;
1953 }
1954
Emilian Peev7650c122017-01-19 08:24:33 -08001955 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1956 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001957 isJpeg = true;
1958 jpegSize.width = newStream->width;
1959 jpegSize.height = newStream->height;
1960 if (newStream->width > VIDEO_4K_WIDTH ||
1961 newStream->height > VIDEO_4K_HEIGHT)
1962 bJpegExceeds4K = true;
1963 }
1964
1965 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1966 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1967 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001968 // In HAL3 we can have multiple different video streams.
1969 // The variables video width and height are used below as
1970 // dimensions of the biggest of them
1971 if (videoWidth < newStream->width ||
1972 videoHeight < newStream->height) {
1973 videoWidth = newStream->width;
1974 videoHeight = newStream->height;
1975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1977 (VIDEO_4K_HEIGHT <= newStream->height)) {
1978 m_bIs4KVideo = true;
1979 }
1980 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1981 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001982
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 }
1984 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1985 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1986 switch (newStream->format) {
1987 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001988 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1989 depthPresent = true;
1990 break;
1991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 stallStreamCnt++;
1993 if (isOnEncoder(maxViewfinderSize, newStream->width,
1994 newStream->height)) {
1995 numStreamsOnEncoder++;
1996 bJpegOnEncoder = true;
1997 }
1998 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1999 newStream->width);
2000 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2001                     newStream->height);
2002 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2003 "FATAL: max_downscale_factor cannot be zero and so assert");
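                // Worked example (numbers hypothetical): with a 4000x3000
                // active array and max_downscale_factor of 8, a 320x240 JPEG
                // gives width_ratio = 13 and height_ratio = 13, both above the
                // limit, so bSmallJpegSize is set (used further down when
                // choosing the JPEG stream's postprocess mask).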
2004 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2005 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2006 LOGH("Setting small jpeg size flag to true");
2007 bSmallJpegSize = true;
2008 }
2009 break;
2010 case HAL_PIXEL_FORMAT_RAW10:
2011 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2012 case HAL_PIXEL_FORMAT_RAW16:
2013 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002014 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2015 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2016 pdStatCount++;
2017 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002018 break;
2019 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2020 processedStreamCnt++;
2021 if (isOnEncoder(maxViewfinderSize, newStream->width,
2022 newStream->height)) {
2023 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2024 !IS_USAGE_ZSL(newStream->usage)) {
2025 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2026 }
2027 numStreamsOnEncoder++;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2031 processedStreamCnt++;
2032 if (isOnEncoder(maxViewfinderSize, newStream->width,
2033 newStream->height)) {
2034 // If Yuv888 size is not greater than 4K, set feature mask
2035 // to SUPERSET so that it support concurrent request on
2036 // YUV and JPEG.
2037 if (newStream->width <= VIDEO_4K_WIDTH &&
2038 newStream->height <= VIDEO_4K_HEIGHT) {
2039 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2040 }
2041 numStreamsOnEncoder++;
2042 numYuv888OnEncoder++;
2043 largeYuv888Size.width = newStream->width;
2044 largeYuv888Size.height = newStream->height;
2045 }
2046 break;
2047 default:
2048 processedStreamCnt++;
2049 if (isOnEncoder(maxViewfinderSize, newStream->width,
2050 newStream->height)) {
2051 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2052 numStreamsOnEncoder++;
2053 }
2054 break;
2055 }
2056
2057 }
2058 }
2059
2060 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2061 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2062 !m_bIsVideo) {
2063 m_bEisEnable = false;
2064 }
2065
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002066 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2067 pthread_mutex_unlock(&mMutex);
2068 return -EINVAL;
2069 }
2070
Thierry Strudel54dc9782017-02-15 12:12:10 -08002071 uint8_t forceEnableTnr = 0;
2072 char tnr_prop[PROPERTY_VALUE_MAX];
2073 memset(tnr_prop, 0, sizeof(tnr_prop));
2074 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2075 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2076
Thierry Strudel3d639192016-09-09 11:52:26 -07002077 /* Logic to enable/disable TNR based on specific config size/etc.*/
2078 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002079 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2080 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002081 else if (forceEnableTnr)
2082 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002083
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002084 char videoHdrProp[PROPERTY_VALUE_MAX];
2085 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2086 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2087 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2088
2089 if (hdr_mode_prop == 1 && m_bIsVideo &&
2090 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2091 m_bVideoHdrEnabled = true;
2092 else
2093 m_bVideoHdrEnabled = false;
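    // Assumed debug usage: "adb shell setprop persist.camera.hdr.video 1" turns
    // video HDR on for non-HFR video sessions; it stays off otherwise.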
2094
2095
Thierry Strudel3d639192016-09-09 11:52:26 -07002096 /* Check if num_streams is sane */
2097 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2098 rawStreamCnt > MAX_RAW_STREAMS ||
2099 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2100         LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2101 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2102 pthread_mutex_unlock(&mMutex);
2103 return -EINVAL;
2104 }
2105 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002106 if (isZsl && m_bIs4KVideo) {
2107 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002108 pthread_mutex_unlock(&mMutex);
2109 return -EINVAL;
2110 }
2111 /* Check if stream sizes are sane */
2112 if (numStreamsOnEncoder > 2) {
2113 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2114 pthread_mutex_unlock(&mMutex);
2115 return -EINVAL;
2116 } else if (1 < numStreamsOnEncoder){
2117 bUseCommonFeatureMask = true;
2118 LOGH("Multiple streams above max viewfinder size, common mask needed");
2119 }
2120
2121 /* Check if BLOB size is greater than 4k in 4k recording case */
2122 if (m_bIs4KVideo && bJpegExceeds4K) {
2123 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2124 pthread_mutex_unlock(&mMutex);
2125 return -EINVAL;
2126 }
2127
Emilian Peev7650c122017-01-19 08:24:33 -08002128 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2129 depthPresent) {
2130 LOGE("HAL doesn't support depth streams in HFR mode!");
2131 pthread_mutex_unlock(&mMutex);
2132 return -EINVAL;
2133 }
2134
Thierry Strudel3d639192016-09-09 11:52:26 -07002135 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2136 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2137 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2138 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2139 // configurations:
2140 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2141 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2142 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2143 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2144 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2145 __func__);
2146 pthread_mutex_unlock(&mMutex);
2147 return -EINVAL;
2148 }
2149
2150 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2151 // the YUV stream's size is greater or equal to the JPEG size, set common
2152 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2153 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2154 jpegSize.width, jpegSize.height) &&
2155 largeYuv888Size.width > jpegSize.width &&
2156 largeYuv888Size.height > jpegSize.height) {
2157 bYuv888OverrideJpeg = true;
2158 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2159 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2160 }
2161
2162 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2163 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2164 commonFeatureMask);
2165 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2166 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2167
2168 rc = validateStreamDimensions(streamList);
2169 if (rc == NO_ERROR) {
2170 rc = validateStreamRotations(streamList);
2171 }
2172 if (rc != NO_ERROR) {
2173 LOGE("Invalid stream configuration requested!");
2174 pthread_mutex_unlock(&mMutex);
2175 return rc;
2176 }
2177
Emilian Peev0f3c3162017-03-15 12:57:46 +00002178 if (1 < pdStatCount) {
2179 LOGE("HAL doesn't support multiple PD streams");
2180 pthread_mutex_unlock(&mMutex);
2181 return -EINVAL;
2182 }
2183
2184 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2185 (1 == pdStatCount)) {
2186 LOGE("HAL doesn't support PD streams in HFR mode!");
2187 pthread_mutex_unlock(&mMutex);
2188 return -EINVAL;
2189 }
2190
Thierry Strudel3d639192016-09-09 11:52:26 -07002191 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2192 for (size_t i = 0; i < streamList->num_streams; i++) {
2193 camera3_stream_t *newStream = streamList->streams[i];
2194 LOGH("newStream type = %d, stream format = %d "
2195 "stream size : %d x %d, stream rotation = %d",
2196 newStream->stream_type, newStream->format,
2197 newStream->width, newStream->height, newStream->rotation);
2198         // if the stream is already in mStreamInfo, validate it
2199 bool stream_exists = false;
2200 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2201 it != mStreamInfo.end(); it++) {
2202 if ((*it)->stream == newStream) {
2203 QCamera3ProcessingChannel *channel =
2204 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2205 stream_exists = true;
2206 if (channel)
2207 delete channel;
2208 (*it)->status = VALID;
2209 (*it)->stream->priv = NULL;
2210 (*it)->channel = NULL;
2211 }
2212 }
2213 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2214 //new stream
2215 stream_info_t* stream_info;
2216 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2217 if (!stream_info) {
2218 LOGE("Could not allocate stream info");
2219 rc = -ENOMEM;
2220 pthread_mutex_unlock(&mMutex);
2221 return rc;
2222 }
2223 stream_info->stream = newStream;
2224 stream_info->status = VALID;
2225 stream_info->channel = NULL;
2226 mStreamInfo.push_back(stream_info);
2227 }
2228 /* Covers Opaque ZSL and API1 F/W ZSL */
2229 if (IS_USAGE_ZSL(newStream->usage)
2230 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2231 if (zslStream != NULL) {
2232 LOGE("Multiple input/reprocess streams requested!");
2233 pthread_mutex_unlock(&mMutex);
2234 return BAD_VALUE;
2235 }
2236 zslStream = newStream;
2237 }
2238 /* Covers YUV reprocess */
2239 if (inputStream != NULL) {
2240 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2241 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2242 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2243 && inputStream->width == newStream->width
2244 && inputStream->height == newStream->height) {
2245 if (zslStream != NULL) {
2246 /* This scenario indicates multiple YUV streams with same size
2247 * as input stream have been requested, since zsl stream handle
2248                  * is solely used for the purpose of overriding the size of streams
2249 * which share h/w streams we will just make a guess here as to
2250 * which of the stream is a ZSL stream, this will be refactored
2251 * once we make generic logic for streams sharing encoder output
2252 */
2253                 LOGH("Warning, Multiple input/reprocess streams requested!");
2254 }
2255 zslStream = newStream;
2256 }
2257 }
2258 }
2259
2260 /* If a zsl stream is set, we know that we have configured at least one input or
2261 bidirectional stream */
2262 if (NULL != zslStream) {
2263 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2264 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2265 mInputStreamInfo.format = zslStream->format;
2266 mInputStreamInfo.usage = zslStream->usage;
2267 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2268 mInputStreamInfo.dim.width,
2269 mInputStreamInfo.dim.height,
2270 mInputStreamInfo.format, mInputStreamInfo.usage);
2271 }
2272
2273 cleanAndSortStreamInfo();
2274 if (mMetadataChannel) {
2275 delete mMetadataChannel;
2276 mMetadataChannel = NULL;
2277 }
2278 if (mSupportChannel) {
2279 delete mSupportChannel;
2280 mSupportChannel = NULL;
2281 }
2282
2283 if (mAnalysisChannel) {
2284 delete mAnalysisChannel;
2285 mAnalysisChannel = NULL;
2286 }
2287
2288 if (mDummyBatchChannel) {
2289 delete mDummyBatchChannel;
2290 mDummyBatchChannel = NULL;
2291 }
2292
Emilian Peev7650c122017-01-19 08:24:33 -08002293 if (mDepthChannel) {
2294 mDepthChannel = NULL;
2295 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002296 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002297
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002298 mShutterDispatcher.clear();
2299 mOutputBufferDispatcher.clear();
2300
Thierry Strudel2896d122017-02-23 19:18:03 -08002301 char is_type_value[PROPERTY_VALUE_MAX];
2302 property_get("persist.camera.is_type", is_type_value, "4");
2303 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2304
Binhao Line406f062017-05-03 14:39:44 -07002305 char property_value[PROPERTY_VALUE_MAX];
2306 property_get("persist.camera.gzoom.at", property_value, "0");
2307 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002308 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2309 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2310 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2311 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002312
2313 property_get("persist.camera.gzoom.4k", property_value, "0");
2314 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2315
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 //Create metadata channel and initialize it
2317 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2318 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2319 gCamCapability[mCameraId]->color_arrangement);
2320 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2321 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002322 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 if (mMetadataChannel == NULL) {
2324 LOGE("failed to allocate metadata channel");
2325 rc = -ENOMEM;
2326 pthread_mutex_unlock(&mMutex);
2327 return rc;
2328 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002329 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002330 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2331 if (rc < 0) {
2332 LOGE("metadata channel initialization failed");
2333 delete mMetadataChannel;
2334 mMetadataChannel = NULL;
2335 pthread_mutex_unlock(&mMutex);
2336 return rc;
2337 }
2338
Thierry Strudel2896d122017-02-23 19:18:03 -08002339 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002341 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002342     // Keep track of preview/video stream indices.
2343     // There could be more than one preview stream, but only one video stream.
2344 int32_t video_stream_idx = -1;
2345 int32_t preview_stream_idx[streamList->num_streams];
2346 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002347 bool previewTnr[streamList->num_streams];
2348 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2349 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2350 // Loop through once to determine preview TNR conditions before creating channels.
2351 for (size_t i = 0; i < streamList->num_streams; i++) {
2352 camera3_stream_t *newStream = streamList->streams[i];
2353 uint32_t stream_usage = newStream->usage;
2354 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2355 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2356 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2357 video_stream_idx = (int32_t)i;
2358 else
2359 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2360 }
2361 }
2362 // By default, preview stream TNR is disabled.
2363     // Enable TNR for the preview stream if all conditions below are satisfied:
2364 // 1. preview resolution == video resolution.
2365 // 2. video stream TNR is enabled.
2366 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
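    // Example (sizes hypothetical): a 1920x1080 preview sharing a session with
    // a 1920x1080 video stream that has TNR enabled gets previewTnr[] set when
    // EIS 2.0 is selected or the camera is front-facing.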
2367 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2368 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2369 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2370 if (m_bTnrEnabled && m_bTnrVideo &&
2371 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2372 video_stream->width == preview_stream->width &&
2373 video_stream->height == preview_stream->height) {
2374 previewTnr[preview_stream_idx[i]] = true;
2375 }
2376 }
2377
Thierry Strudel3d639192016-09-09 11:52:26 -07002378 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2379 /* Allocate channel objects for the requested streams */
2380 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002381
Thierry Strudel3d639192016-09-09 11:52:26 -07002382 camera3_stream_t *newStream = streamList->streams[i];
2383 uint32_t stream_usage = newStream->usage;
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2386 struct camera_info *p_info = NULL;
2387 pthread_mutex_lock(&gCamLock);
2388 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2389 pthread_mutex_unlock(&gCamLock);
2390 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2391 || IS_USAGE_ZSL(newStream->usage)) &&
2392 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002393 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002394 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002395 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2396 if (bUseCommonFeatureMask)
2397 zsl_ppmask = commonFeatureMask;
2398 else
2399 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 if (numStreamsOnEncoder > 0)
2402 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2403 else
2404 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002405 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 LOGH("Input stream configured, reprocess config");
2410 } else {
2411             // for non-ZSL streams, find out the format
2412 switch (newStream->format) {
2413 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2414 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002415 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002416 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2417 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2418 /* add additional features to pp feature mask */
2419 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2420 mStreamConfigInfo.num_streams);
2421
2422 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2423 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2424 CAM_STREAM_TYPE_VIDEO;
2425 if (m_bTnrEnabled && m_bTnrVideo) {
2426 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2427 CAM_QCOM_FEATURE_CPP_TNR;
2428 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2430 ~CAM_QCOM_FEATURE_CDS;
2431 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002432 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2434 CAM_QTI_FEATURE_PPEISCORE;
2435 }
Binhao Line406f062017-05-03 14:39:44 -07002436 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2438 CAM_QCOM_FEATURE_GOOG_ZOOM;
2439 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002440 } else {
2441 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2442 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002443 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2445 CAM_QCOM_FEATURE_CPP_TNR;
2446 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2448 ~CAM_QCOM_FEATURE_CDS;
2449 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002450 if(!m_bSwTnrPreview) {
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2452 ~CAM_QTI_FEATURE_SW_TNR;
2453 }
Binhao Line406f062017-05-03 14:39:44 -07002454 if (is_goog_zoom_preview_enabled) {
2455 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2456 CAM_QCOM_FEATURE_GOOG_ZOOM;
2457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002458 padding_info.width_padding = mSurfaceStridePadding;
2459 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002460 previewSize.width = (int32_t)newStream->width;
2461 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002462 }
2463 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2464 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2465 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2466 newStream->height;
2467 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2468 newStream->width;
2469 }
2470 }
2471 break;
2472 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002473 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002474 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2475 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2476 if (bUseCommonFeatureMask)
2477 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2478 commonFeatureMask;
2479 else
2480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2481 CAM_QCOM_FEATURE_NONE;
2482 } else {
2483 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2484 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2485 }
2486 break;
2487 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002488 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2490 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2491 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2492 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2493 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 /* Remove rotation if it is not supported
2495 for 4K LiveVideo snapshot case (online processing) */
2496 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2497 CAM_QCOM_FEATURE_ROTATION)) {
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2499 &= ~CAM_QCOM_FEATURE_ROTATION;
2500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002501 } else {
2502 if (bUseCommonFeatureMask &&
2503 isOnEncoder(maxViewfinderSize, newStream->width,
2504 newStream->height)) {
2505 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2506 } else {
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2508 }
2509 }
2510 if (isZsl) {
2511 if (zslStream) {
2512 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2513 (int32_t)zslStream->width;
2514 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2515 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002516 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2517 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 } else {
2519 LOGE("Error, No ZSL stream identified");
2520 pthread_mutex_unlock(&mMutex);
2521 return -EINVAL;
2522 }
2523 } else if (m_bIs4KVideo) {
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2525 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2526 } else if (bYuv888OverrideJpeg) {
2527 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2528 (int32_t)largeYuv888Size.width;
2529 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2530 (int32_t)largeYuv888Size.height;
2531 }
2532 break;
2533 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2534 case HAL_PIXEL_FORMAT_RAW16:
2535 case HAL_PIXEL_FORMAT_RAW10:
2536 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2538 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002539 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2540 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2541 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2542 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2543 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2544 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2545 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2546 gCamCapability[mCameraId]->dt[mPDIndex];
2547 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2548 gCamCapability[mCameraId]->vc[mPDIndex];
2549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002550 break;
2551 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2554 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2555 break;
2556 }
2557 }
2558
2559 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2560 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2561 gCamCapability[mCameraId]->color_arrangement);
2562
2563 if (newStream->priv == NULL) {
2564 //New stream, construct channel
2565 switch (newStream->stream_type) {
2566 case CAMERA3_STREAM_INPUT:
2567 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2568 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2569 break;
2570 case CAMERA3_STREAM_BIDIRECTIONAL:
2571 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2572 GRALLOC_USAGE_HW_CAMERA_WRITE;
2573 break;
2574 case CAMERA3_STREAM_OUTPUT:
2575 /* For video encoding stream, set read/write rarely
2576 * flag so that they may be set to un-cached */
2577 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2578 newStream->usage |=
2579 (GRALLOC_USAGE_SW_READ_RARELY |
2580 GRALLOC_USAGE_SW_WRITE_RARELY |
2581 GRALLOC_USAGE_HW_CAMERA_WRITE);
2582 else if (IS_USAGE_ZSL(newStream->usage))
2583 {
2584 LOGD("ZSL usage flag skipping");
2585 }
2586 else if (newStream == zslStream
2587 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2589 } else
2590 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2591 break;
2592 default:
2593 LOGE("Invalid stream_type %d", newStream->stream_type);
2594 break;
2595 }
2596
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002597 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002598 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2599 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2600 QCamera3ProcessingChannel *channel = NULL;
2601 switch (newStream->format) {
2602 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2603 if ((newStream->usage &
2604 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2605 (streamList->operation_mode ==
2606 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2607 ) {
2608 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2609 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002610 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002611 this,
2612 newStream,
2613 (cam_stream_type_t)
2614 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2615 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2616 mMetadataChannel,
2617 0); //heap buffers are not required for HFR video channel
2618 if (channel == NULL) {
2619 LOGE("allocation of channel failed");
2620 pthread_mutex_unlock(&mMutex);
2621 return -ENOMEM;
2622 }
2623 //channel->getNumBuffers() will return 0 here so use
2624 //MAX_INFLIGHT_HFR_REQUESTS
2625 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2626 newStream->priv = channel;
2627 LOGI("num video buffers in HFR mode: %d",
2628 MAX_INFLIGHT_HFR_REQUESTS);
2629 } else {
2630 /* Copy stream contents in HFR preview only case to create
2631 * dummy batch channel so that sensor streaming is in
2632 * HFR mode */
2633 if (!m_bIsVideo && (streamList->operation_mode ==
2634 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2635 mDummyBatchStream = *newStream;
2636 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002637 int bufferCount = MAX_INFLIGHT_REQUESTS;
2638 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2639 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002640 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2641 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2642 bufferCount = m_bIs4KVideo ?
2643 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2644 }
2645
Thierry Strudel2896d122017-02-23 19:18:03 -08002646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002647 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2648 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002649 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 this,
2651 newStream,
2652 (cam_stream_type_t)
2653 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2654 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2655 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002656 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002657 if (channel == NULL) {
2658 LOGE("allocation of channel failed");
2659 pthread_mutex_unlock(&mMutex);
2660 return -ENOMEM;
2661 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002662 /* disable UBWC for preview, though supported,
2663 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002665 (previewSize.width == (int32_t)videoWidth)&&
2666 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002667 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002668 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002669 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002670 /* When goog_zoom is linked to the preview or video stream,
2671 * disable ubwc to the linked stream */
2672 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2673 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2674 channel->setUBWCEnabled(false);
2675 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002676 newStream->max_buffers = channel->getNumBuffers();
2677 newStream->priv = channel;
2678 }
2679 break;
2680 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2681 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2682 mChannelHandle,
2683 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002684 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002685 this,
2686 newStream,
2687 (cam_stream_type_t)
2688 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2689 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2690 mMetadataChannel);
2691 if (channel == NULL) {
2692 LOGE("allocation of YUV channel failed");
2693 pthread_mutex_unlock(&mMutex);
2694 return -ENOMEM;
2695 }
2696 newStream->max_buffers = channel->getNumBuffers();
2697 newStream->priv = channel;
2698 break;
2699 }
2700 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2701 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002702 case HAL_PIXEL_FORMAT_RAW10: {
2703 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2704 (HAL_DATASPACE_DEPTH != newStream->data_space))
2705 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 mRawChannel = new QCamera3RawChannel(
2707 mCameraHandle->camera_handle, mChannelHandle,
2708 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002709 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002710 this, newStream,
2711 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002712 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002713 if (mRawChannel == NULL) {
2714 LOGE("allocation of raw channel failed");
2715 pthread_mutex_unlock(&mMutex);
2716 return -ENOMEM;
2717 }
2718 newStream->max_buffers = mRawChannel->getNumBuffers();
2719 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2720 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002721 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002722 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002723 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2724 mDepthChannel = new QCamera3DepthChannel(
2725 mCameraHandle->camera_handle, mChannelHandle,
2726 mCameraHandle->ops, NULL, NULL, &padding_info,
2727 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2728 mMetadataChannel);
2729 if (NULL == mDepthChannel) {
2730 LOGE("Allocation of depth channel failed");
2731 pthread_mutex_unlock(&mMutex);
2732 return NO_MEMORY;
2733 }
2734 newStream->priv = mDepthChannel;
2735 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2736 } else {
2737 // Max live snapshot inflight buffer is 1. This is to mitigate
2738 // frame drop issues for video snapshot. The more buffers being
2739 // allocated, the more frame drops there are.
2740 mPictureChannel = new QCamera3PicChannel(
2741 mCameraHandle->camera_handle, mChannelHandle,
2742 mCameraHandle->ops, captureResultCb,
2743 setBufferErrorStatus, &padding_info, this, newStream,
2744 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2745 m_bIs4KVideo, isZsl, mMetadataChannel,
2746 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2747 if (mPictureChannel == NULL) {
2748 LOGE("allocation of channel failed");
2749 pthread_mutex_unlock(&mMutex);
2750 return -ENOMEM;
2751 }
2752 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2753 newStream->max_buffers = mPictureChannel->getNumBuffers();
2754 mPictureChannel->overrideYuvSize(
2755 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2756 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002758 break;
2759
2760 default:
2761 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002762 pthread_mutex_unlock(&mMutex);
2763 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 }
2765 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2766 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2767 } else {
2768 LOGE("Error, Unknown stream type");
2769 pthread_mutex_unlock(&mMutex);
2770 return -EINVAL;
2771 }
2772
2773 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002774 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002775 // Here we only care whether it's EIS3 or not
2776 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2777 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2778 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2779 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002780 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002781 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002782 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002783 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2784 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2785 }
2786 }
2787
2788 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2789 it != mStreamInfo.end(); it++) {
2790 if ((*it)->stream == newStream) {
2791 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2792 break;
2793 }
2794 }
2795 } else {
2796 // Channel already exists for this stream
2797 // Do nothing for now
2798 }
2799 padding_info = gCamCapability[mCameraId]->padding_info;
2800
Emilian Peev7650c122017-01-19 08:24:33 -08002801 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 * since there is no real stream associated with it
2803 */
Emilian Peev7650c122017-01-19 08:24:33 -08002804 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002805 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2806 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002807 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002809 }
2810
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002811 // Let buffer dispatcher know the configured streams.
2812 mOutputBufferDispatcher.configureStreams(streamList);
2813
Thierry Strudel2896d122017-02-23 19:18:03 -08002814 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2815 onlyRaw = false;
2816 }
2817
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002819 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002820 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002821 cam_analysis_info_t analysisInfo;
2822 int32_t ret = NO_ERROR;
2823 ret = mCommon.getAnalysisInfo(
2824 FALSE,
2825 analysisFeatureMask,
2826 &analysisInfo);
2827 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002828 cam_color_filter_arrangement_t analysis_color_arrangement =
2829 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2830 CAM_FILTER_ARRANGEMENT_Y :
2831 gCamCapability[mCameraId]->color_arrangement);
2832 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2833 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002834 cam_dimension_t analysisDim;
2835 analysisDim = mCommon.getMatchingDimension(previewSize,
2836 analysisInfo.analysis_recommended_res);
2837
2838 mAnalysisChannel = new QCamera3SupportChannel(
2839 mCameraHandle->camera_handle,
2840 mChannelHandle,
2841 mCameraHandle->ops,
2842 &analysisInfo.analysis_padding_info,
2843 analysisFeatureMask,
2844 CAM_STREAM_TYPE_ANALYSIS,
2845 &analysisDim,
2846 (analysisInfo.analysis_format
2847 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2848 : CAM_FORMAT_YUV_420_NV21),
2849 analysisInfo.hw_analysis_supported,
2850 gCamCapability[mCameraId]->color_arrangement,
2851 this,
2852 0); // force buffer count to 0
2853 } else {
2854 LOGW("getAnalysisInfo failed, ret = %d", ret);
2855 }
2856 if (!mAnalysisChannel) {
2857 LOGW("Analysis channel cannot be created");
2858 }
2859 }
2860
Thierry Strudel3d639192016-09-09 11:52:26 -07002861 //RAW DUMP channel
2862 if (mEnableRawDump && isRawStreamRequested == false){
2863 cam_dimension_t rawDumpSize;
2864 rawDumpSize = getMaxRawSize(mCameraId);
2865 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2866 setPAAFSupport(rawDumpFeatureMask,
2867 CAM_STREAM_TYPE_RAW,
2868 gCamCapability[mCameraId]->color_arrangement);
2869 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2870 mChannelHandle,
2871 mCameraHandle->ops,
2872 rawDumpSize,
2873 &padding_info,
2874 this, rawDumpFeatureMask);
2875 if (!mRawDumpChannel) {
2876 LOGE("Raw Dump channel cannot be created");
2877 pthread_mutex_unlock(&mMutex);
2878 return -ENOMEM;
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 if (mAnalysisChannel) {
2883 cam_analysis_info_t analysisInfo;
2884 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2885 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2886 CAM_STREAM_TYPE_ANALYSIS;
2887 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2888 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002889 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002890 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2891 &analysisInfo);
2892 if (rc != NO_ERROR) {
2893 LOGE("getAnalysisInfo failed, ret = %d", rc);
2894 pthread_mutex_unlock(&mMutex);
2895 return rc;
2896 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002897 cam_color_filter_arrangement_t analysis_color_arrangement =
2898 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2899 CAM_FILTER_ARRANGEMENT_Y :
2900 gCamCapability[mCameraId]->color_arrangement);
2901 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2902 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2903 analysis_color_arrangement);
2904
Thierry Strudel3d639192016-09-09 11:52:26 -07002905 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002906 mCommon.getMatchingDimension(previewSize,
2907 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002908 mStreamConfigInfo.num_streams++;
2909 }
2910
Thierry Strudel2896d122017-02-23 19:18:03 -08002911 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002912 cam_analysis_info_t supportInfo;
2913 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2914 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2915 setPAAFSupport(callbackFeatureMask,
2916 CAM_STREAM_TYPE_CALLBACK,
2917 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002918 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002919 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002920 if (ret != NO_ERROR) {
2921 /* Ignore the error for Mono camera
2922 * because the PAAF bit mask is only set
2923 * for CAM_STREAM_TYPE_ANALYSIS stream type
2924 */
2925 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2926 LOGW("getAnalysisInfo failed, ret = %d", ret);
2927 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002928 }
2929 mSupportChannel = new QCamera3SupportChannel(
2930 mCameraHandle->camera_handle,
2931 mChannelHandle,
2932 mCameraHandle->ops,
2933 &gCamCapability[mCameraId]->padding_info,
2934 callbackFeatureMask,
2935 CAM_STREAM_TYPE_CALLBACK,
2936 &QCamera3SupportChannel::kDim,
2937 CAM_FORMAT_YUV_420_NV21,
2938 supportInfo.hw_analysis_supported,
2939 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002940 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002941 if (!mSupportChannel) {
2942 LOGE("dummy channel cannot be created");
2943 pthread_mutex_unlock(&mMutex);
2944 return -ENOMEM;
2945 }
2946 }
2947
2948 if (mSupportChannel) {
2949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2950 QCamera3SupportChannel::kDim;
2951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2952 CAM_STREAM_TYPE_CALLBACK;
2953 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2954 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2955 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2956 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2957 gCamCapability[mCameraId]->color_arrangement);
2958 mStreamConfigInfo.num_streams++;
2959 }
2960
2961 if (mRawDumpChannel) {
2962 cam_dimension_t rawSize;
2963 rawSize = getMaxRawSize(mCameraId);
2964 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2965 rawSize;
2966 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2967 CAM_STREAM_TYPE_RAW;
2968 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2969 CAM_QCOM_FEATURE_NONE;
2970 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2971 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2972 gCamCapability[mCameraId]->color_arrangement);
2973 mStreamConfigInfo.num_streams++;
2974 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002975
2976 if (mHdrPlusRawSrcChannel) {
2977 cam_dimension_t rawSize;
2978 rawSize = getMaxRawSize(mCameraId);
2979 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2980 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2981 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2982 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2983 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2984 gCamCapability[mCameraId]->color_arrangement);
2985 mStreamConfigInfo.num_streams++;
2986 }
2987
Thierry Strudel3d639192016-09-09 11:52:26 -07002988 /* In HFR mode, if video stream is not added, create a dummy channel so that
2989 * ISP can create a batch mode even for preview only case. This channel is
2990 * never 'start'ed (no stream-on), it is only 'initialized' */
2991 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2992 !m_bIsVideo) {
2993 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2994 setPAAFSupport(dummyFeatureMask,
2995 CAM_STREAM_TYPE_VIDEO,
2996 gCamCapability[mCameraId]->color_arrangement);
2997 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2998 mChannelHandle,
2999 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003000 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003001 this,
3002 &mDummyBatchStream,
3003 CAM_STREAM_TYPE_VIDEO,
3004 dummyFeatureMask,
3005 mMetadataChannel);
3006 if (NULL == mDummyBatchChannel) {
3007 LOGE("creation of mDummyBatchChannel failed."
3008 "Preview will use non-hfr sensor mode ");
3009 }
3010 }
3011 if (mDummyBatchChannel) {
3012 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3013 mDummyBatchStream.width;
3014 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3015 mDummyBatchStream.height;
3016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3017 CAM_STREAM_TYPE_VIDEO;
3018 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3019 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3020 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3021 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3022 gCamCapability[mCameraId]->color_arrangement);
3023 mStreamConfigInfo.num_streams++;
3024 }
3025
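    // Record the per-configuration buffer budget in the stream config info:
    // max_buffers is 0 for 4K video, MAX_VIDEO_BUFFERS for EIS3 video sessions,
    // and MAX_INFLIGHT_REQUESTS otherwise.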
3026 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3027 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003028 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003029 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003030
3031 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3032 for (pendingRequestIterator i = mPendingRequestsList.begin();
3033 i != mPendingRequestsList.end();) {
3034 i = erasePendingRequest(i);
3035 }
3036 mPendingFrameDropList.clear();
3037 // Initialize/Reset the pending buffers list
3038 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3039 req.mPendingBufferList.clear();
3040 }
3041 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003042 mExpectedInflightDuration = 0;
3043 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003044
Thierry Strudel3d639192016-09-09 11:52:26 -07003045 mCurJpegMeta.clear();
3046 //Get min frame duration for this streams configuration
3047 deriveMinFrameDuration();
3048
Chien-Yu Chenee335912017-02-09 17:53:20 -08003049 mFirstPreviewIntentSeen = false;
3050
3051 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003052 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003053 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3054 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003055 disableHdrPlusModeLocked();
3056 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003057
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 // Update state
3059 mState = CONFIGURED;
3060
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003061 mFirstMetadataCallback = true;
3062
Thierry Strudel3d639192016-09-09 11:52:26 -07003063 pthread_mutex_unlock(&mMutex);
3064
3065 return rc;
3066}
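
/* Illustrative sketch (hypothetical sizes, not part of the HAL): a common
 * preview + JPEG configuration handled above would contain two output streams,
 * e.g.
 *
 *   camera3_stream_t preview = {};                 // -> QCamera3RegularChannel
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   preview.width       = 1920;  preview.height = 1080;
 *
 *   camera3_stream_t jpeg = {};                    // -> QCamera3PicChannel
 *   jpeg.stream_type = CAMERA3_STREAM_OUTPUT;
 *   jpeg.format      = HAL_PIXEL_FORMAT_BLOB;
 *   jpeg.width       = 4032;  jpeg.height = 3024;
 *
 * As in the switch above, HAL_PIXEL_FORMAT_YCbCr_420_888 outputs map to
 * QCamera3YUVChannel, RAW10/RAW16/RAW_OPAQUE to QCamera3RawChannel, and BLOB
 * with HAL_DATASPACE_DEPTH to QCamera3DepthChannel. Each created channel is
 * stored in newStream->priv and its buffer count is published through
 * newStream->max_buffers.
 */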
3067
3068/*===========================================================================
3069 * FUNCTION : validateCaptureRequest
3070 *
3071 * DESCRIPTION: validate a capture request from camera service
3072 *
3073 * PARAMETERS :
3074 * @request : request from framework to process
3075 *
3076 * RETURN :
3077 *
3078 *==========================================================================*/
3079int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003080 camera3_capture_request_t *request,
3081 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003082{
3083 ssize_t idx = 0;
3084 const camera3_stream_buffer_t *b;
3085 CameraMetadata meta;
3086
3087 /* Sanity check the request */
3088 if (request == NULL) {
3089 LOGE("NULL capture request");
3090 return BAD_VALUE;
3091 }
3092
3093 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3094 /*settings cannot be null for the first request*/
3095 return BAD_VALUE;
3096 }
3097
3098 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003099 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3100 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003101 LOGE("Request %d: No output buffers provided!",
3102 frameNumber);
3103 return BAD_VALUE;
3104 }
3105 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3106 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3107 request->num_output_buffers, MAX_NUM_STREAMS);
3108 return BAD_VALUE;
3109 }
3110 if (request->input_buffer != NULL) {
3111 b = request->input_buffer;
3112 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3113 LOGE("Request %d: Buffer %ld: Status not OK!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 if (b->release_fence != -1) {
3118 LOGE("Request %d: Buffer %ld: Has a release fence!",
3119 frameNumber, (long)idx);
3120 return BAD_VALUE;
3121 }
3122 if (b->buffer == NULL) {
3123 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3124 frameNumber, (long)idx);
3125 return BAD_VALUE;
3126 }
3127 }
3128
3129 // Validate all buffers
3130 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003131 if (b == NULL) {
3132 return BAD_VALUE;
3133 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003134 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003135 QCamera3ProcessingChannel *channel =
3136 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3137 if (channel == NULL) {
3138 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3143 LOGE("Request %d: Buffer %ld: Status not OK!",
3144 frameNumber, (long)idx);
3145 return BAD_VALUE;
3146 }
3147 if (b->release_fence != -1) {
3148 LOGE("Request %d: Buffer %ld: Has a release fence!",
3149 frameNumber, (long)idx);
3150 return BAD_VALUE;
3151 }
3152 if (b->buffer == NULL) {
3153 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3154 frameNumber, (long)idx);
3155 return BAD_VALUE;
3156 }
3157 if (*(b->buffer) == NULL) {
3158 LOGE("Request %d: Buffer %ld: NULL private handle!",
3159 frameNumber, (long)idx);
3160 return BAD_VALUE;
3161 }
3162 idx++;
3163 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003164 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003165 return NO_ERROR;
3166}
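
/* A minimal sketch (hypothetical names and values) of a request that passes the
 * checks above: one output buffer on a configured stream, no release fence, and
 * non-NULL settings when this is the first request after configure_streams().
 *
 *   camera3_stream_buffer_t buf = {};
 *   buf.stream        = &preview;          // stream->priv was set in configure_streams()
 *   buf.buffer        = &bufferHandle;     // non-NULL handle to a non-NULL buffer
 *   buf.status        = CAMERA3_BUFFER_STATUS_OK;
 *   buf.acquire_fence = -1;
 *   buf.release_fence = -1;                // output buffers must not carry release fences
 *
 *   camera3_capture_request_t req = {};
 *   req.frame_number       = 0;
 *   req.settings           = defaultSettings;  // required while mState == CONFIGURED
 *   req.num_output_buffers = 1;
 *   req.output_buffers     = &buf;
 */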
3167
3168/*===========================================================================
3169 * FUNCTION : deriveMinFrameDuration
3170 *
3171 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3172 * on currently configured streams.
3173 *
3174 * PARAMETERS : NONE
3175 *
3176 * RETURN : NONE
3177 *
3178 *==========================================================================*/
3179void QCamera3HardwareInterface::deriveMinFrameDuration()
3180{
3181 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003182 bool hasRaw = false;
3183
3184 mMinRawFrameDuration = 0;
3185 mMinJpegFrameDuration = 0;
3186 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003187
3188 maxJpegDim = 0;
3189 maxProcessedDim = 0;
3190 maxRawDim = 0;
3191
3192 // Figure out maximum jpeg, processed, and raw dimensions
3193 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3194 it != mStreamInfo.end(); it++) {
3195
3196 // Input stream doesn't have valid stream_type
3197 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3198 continue;
3199
3200 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3201 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3202 if (dimension > maxJpegDim)
3203 maxJpegDim = dimension;
3204 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3205 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3206 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003207 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003208 if (dimension > maxRawDim)
3209 maxRawDim = dimension;
3210 } else {
3211 if (dimension > maxProcessedDim)
3212 maxProcessedDim = dimension;
3213 }
3214 }
3215
3216 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3217 MAX_SIZES_CNT);
3218
3219 //Assume all jpeg dimensions are in processed dimensions.
3220 if (maxJpegDim > maxProcessedDim)
3221 maxProcessedDim = maxJpegDim;
3222 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003223 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003224 maxRawDim = INT32_MAX;
3225
3226 for (size_t i = 0; i < count; i++) {
3227 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3228 gCamCapability[mCameraId]->raw_dim[i].height;
3229 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3230 maxRawDim = dimension;
3231 }
3232 }
3233
3234 //Find minimum durations for processed, jpeg, and raw
3235 for (size_t i = 0; i < count; i++) {
3236 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3237 gCamCapability[mCameraId]->raw_dim[i].height) {
3238 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3239 break;
3240 }
3241 }
3242 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3243 for (size_t i = 0; i < count; i++) {
3244 if (maxProcessedDim ==
3245 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3246 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3247 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3248 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3249 break;
3250 }
3251 }
3252}
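
/* Worked example (hypothetical sizes): with a 1920x1080 preview plus a
 * 4032x3024 BLOB stream and no RAW stream configured, maxProcessedDim becomes
 * the JPEG pixel count, so mMinProcessedFrameDuration and mMinJpegFrameDuration
 * are both taken from the picture_min_duration entry matching that dimension,
 * while mMinRawFrameDuration stays 0 because hasRaw is false and no raw_dim
 * entry is matched. */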
3253
3254/*===========================================================================
3255 * FUNCTION : getMinFrameDuration
3256 *
3257 * DESCRIPTION: get minimum frame duration based on the stream configuration's
3258 * minimum frame durations and the current request configuration.
3259 *
3260 * PARAMETERS : @request: request sent by the framework
3261 *
3262 * RETURN : min frame duration for a particular request
3263 *
3264 *==========================================================================*/
3265int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3266{
3267 bool hasJpegStream = false;
3268 bool hasRawStream = false;
3269 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3270 const camera3_stream_t *stream = request->output_buffers[i].stream;
3271 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3272 hasJpegStream = true;
3273 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3274 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3275 stream->format == HAL_PIXEL_FORMAT_RAW16)
3276 hasRawStream = true;
3277 }
3278
3279 if (!hasJpegStream)
3280 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3281 else
3282 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3283}
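
/* For example, a request carrying only a preview buffer is limited by
 * MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB (JPEG)
 * buffer is part of the request, mMinJpegFrameDuration is folded into that
 * maximum as well. */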
3284
3285/*===========================================================================
3286 * FUNCTION : handleBuffersDuringFlushLock
3287 *
3288 * DESCRIPTION: Account for buffers returned from back-end during flush
3289 * This function is executed while mMutex is held by the caller.
3290 *
3291 * PARAMETERS :
3292 * @buffer: image buffer for the callback
3293 *
3294 * RETURN :
3295 *==========================================================================*/
3296void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3297{
3298 bool buffer_found = false;
3299 for (List<PendingBuffersInRequest>::iterator req =
3300 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3301 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3302 for (List<PendingBufferInfo>::iterator i =
3303 req->mPendingBufferList.begin();
3304 i != req->mPendingBufferList.end(); i++) {
3305 if (i->buffer == buffer->buffer) {
3306 mPendingBuffersMap.numPendingBufsAtFlush--;
3307 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3308 buffer->buffer, req->frame_number,
3309 mPendingBuffersMap.numPendingBufsAtFlush);
3310 buffer_found = true;
3311 break;
3312 }
3313 }
3314 if (buffer_found) {
3315 break;
3316 }
3317 }
3318 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3319 //signal the flush()
3320 LOGD("All buffers returned to HAL. Continue flush");
3321 pthread_cond_signal(&mBuffersCond);
3322 }
3323}
3324
Thierry Strudel3d639192016-09-09 11:52:26 -07003325/*===========================================================================
3326 * FUNCTION : handleBatchMetadata
3327 *
3328 * DESCRIPTION: Handles metadata buffer callback in batch mode
3329 *
3330 * PARAMETERS : @metadata_buf: metadata buffer
3331 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3332 * the meta buf in this method
3333 *
3334 * RETURN :
3335 *
3336 *==========================================================================*/
3337void QCamera3HardwareInterface::handleBatchMetadata(
3338 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3339{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003340 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003341
3342 if (NULL == metadata_buf) {
3343 LOGE("metadata_buf is NULL");
3344 return;
3345 }
3346 /* In batch mode, the metadata will contain the frame number and timestamp of
3347 * the last frame in the batch. Eg: a batch containing buffers from request
3348 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3349 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3350 * multiple process_capture_results */
3351 metadata_buffer_t *metadata =
3352 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3353 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3354 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3355 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3356 uint32_t frame_number = 0, urgent_frame_number = 0;
3357 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3358 bool invalid_metadata = false;
3359 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3360 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003361 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003362
3363 int32_t *p_frame_number_valid =
3364 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3365 uint32_t *p_frame_number =
3366 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3367 int64_t *p_capture_time =
3368 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3369 int32_t *p_urgent_frame_number_valid =
3370 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3371 uint32_t *p_urgent_frame_number =
3372 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3373
3374 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3375 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3376 (NULL == p_urgent_frame_number)) {
3377 LOGE("Invalid metadata");
3378 invalid_metadata = true;
3379 } else {
3380 frame_number_valid = *p_frame_number_valid;
3381 last_frame_number = *p_frame_number;
3382 last_frame_capture_time = *p_capture_time;
3383 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3384 last_urgent_frame_number = *p_urgent_frame_number;
3385 }
3386
3387 /* In batchmode, when no video buffers are requested, set_parms are sent
3388 * for every capture_request. The difference between consecutive urgent
3389 * frame numbers and frame numbers should be used to interpolate the
3390 * corresponding frame numbers and time stamps */
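    /* For example, with a batch of 4 (loopCount == 4), mHFRVideoFps == 120 and
     * batch timestamp T for the last frame, the loop below infers frame numbers
     * first_frame_number .. first_frame_number + 3 and timestamps of roughly
     * T - 3/120s, T - 2/120s, T - 1/120s and T for the interpolated results. */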
3391 pthread_mutex_lock(&mMutex);
3392 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003393 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3394 if(idx < 0) {
3395 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3396 last_urgent_frame_number);
3397 mState = ERROR;
3398 pthread_mutex_unlock(&mMutex);
3399 return;
3400 }
3401 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3403 first_urgent_frame_number;
3404
3405 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3406 urgent_frame_number_valid,
3407 first_urgent_frame_number, last_urgent_frame_number);
3408 }
3409
3410 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003411 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3412 if(idx < 0) {
3413 LOGE("Invalid frame number received: %d. Irrecoverable error",
3414 last_frame_number);
3415 mState = ERROR;
3416 pthread_mutex_unlock(&mMutex);
3417 return;
3418 }
3419 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003420 frameNumDiff = last_frame_number + 1 -
3421 first_frame_number;
3422 mPendingBatchMap.removeItem(last_frame_number);
3423
3424 LOGD("frm: valid: %d frm_num: %d - %d",
3425 frame_number_valid,
3426 first_frame_number, last_frame_number);
3427
3428 }
3429 pthread_mutex_unlock(&mMutex);
3430
3431 if (urgent_frame_number_valid || frame_number_valid) {
3432 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3433 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3434 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3435 urgentFrameNumDiff, last_urgent_frame_number);
3436 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3437 LOGE("frameNumDiff: %d frameNum: %d",
3438 frameNumDiff, last_frame_number);
3439 }
3440
3441 for (size_t i = 0; i < loopCount; i++) {
3442 /* handleMetadataWithLock is called even for invalid_metadata for
3443 * pipeline depth calculation */
3444 if (!invalid_metadata) {
3445 /* Infer frame number. Batch metadata contains frame number of the
3446 * last frame */
3447 if (urgent_frame_number_valid) {
3448 if (i < urgentFrameNumDiff) {
3449 urgent_frame_number =
3450 first_urgent_frame_number + i;
3451 LOGD("inferred urgent frame_number: %d",
3452 urgent_frame_number);
3453 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3454 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3455 } else {
3456 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3457 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3458 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3459 }
3460 }
3461
3462 /* Infer frame number. Batch metadata contains frame number of the
3463 * last frame */
3464 if (frame_number_valid) {
3465 if (i < frameNumDiff) {
3466 frame_number = first_frame_number + i;
3467 LOGD("inferred frame_number: %d", frame_number);
3468 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3469 CAM_INTF_META_FRAME_NUMBER, frame_number);
3470 } else {
3471 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3472 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3473 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3474 }
3475 }
3476
3477 if (last_frame_capture_time) {
3478 //Infer timestamp
3479 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003480 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003482 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003483 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3484 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3485 LOGD("batch capture_time: %lld, capture_time: %lld",
3486 last_frame_capture_time, capture_time);
3487 }
3488 }
3489 pthread_mutex_lock(&mMutex);
3490 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003491 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003492 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3493 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 pthread_mutex_unlock(&mMutex);
3496 }
3497
3498 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003499 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 mMetadataChannel->bufDone(metadata_buf);
3501 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003502 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003503 }
3504}
3505
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003506void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3507 camera3_error_msg_code_t errorCode)
3508{
3509 camera3_notify_msg_t notify_msg;
3510 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3511 notify_msg.type = CAMERA3_MSG_ERROR;
3512 notify_msg.message.error.error_code = errorCode;
3513 notify_msg.message.error.error_stream = NULL;
3514 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003515 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003516
3517 return;
3518}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003519
3520/*===========================================================================
3521 * FUNCTION : sendPartialMetadataWithLock
3522 *
3523 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3524 *
3525 * PARAMETERS : @metadata: metadata buffer
3526 * @requestIter: The iterator for the pending capture request for
3527 * which the partial result is being sent
3528 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3529 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003530 * @isJumpstartMetadata: Whether this is a partial metadata for
3531 * jumpstart, i.e. even though it doesn't map to a valid partial
3532 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533 *
3534 * RETURN :
3535 *
3536 *==========================================================================*/
3537
3538void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3539 metadata_buffer_t *metadata,
3540 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003541 bool lastUrgentMetadataInBatch,
3542 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003543{
3544 camera3_capture_result_t result;
3545 memset(&result, 0, sizeof(camera3_capture_result_t));
3546
3547 requestIter->partial_result_cnt++;
3548
3549 // Extract 3A metadata
3550 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003551 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3552 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003553 // Populate metadata result
3554 result.frame_number = requestIter->frame_number;
3555 result.num_output_buffers = 0;
3556 result.output_buffers = NULL;
3557 result.partial_result = requestIter->partial_result_cnt;
3558
3559 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003560 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003561 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3562 // Notify HDR+ client about the partial metadata.
3563 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3564 result.partial_result == PARTIAL_RESULT_COUNT);
3565 }
3566 }
3567
3568 orchestrateResult(&result);
3569 LOGD("urgent frame_number = %u", result.frame_number);
3570 free_camera_metadata((camera_metadata_t *)result.result);
3571}
3572
Thierry Strudel3d639192016-09-09 11:52:26 -07003573/*===========================================================================
3574 * FUNCTION : handleMetadataWithLock
3575 *
3576 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3577 *
3578 * PARAMETERS : @metadata_buf: metadata buffer
3579 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3580 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003581 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3582 * last urgent metadata in a batch. Always true for non-batch mode
3583 * @lastMetadataInBatch: Boolean to indicate whether this is the
3584 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003585 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3586 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 *
3588 * RETURN :
3589 *
3590 *==========================================================================*/
3591void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003592 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003593 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3594 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003595{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003596 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003597 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3598 //during flush do not send metadata from this thread
3599 LOGD("not sending metadata during flush or when mState is error");
3600 if (free_and_bufdone_meta_buf) {
3601 mMetadataChannel->bufDone(metadata_buf);
3602 free(metadata_buf);
3603 }
3604 return;
3605 }
3606
3607 //not in flush
3608 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3609 int32_t frame_number_valid, urgent_frame_number_valid;
3610 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003611 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003612 nsecs_t currentSysTime;
3613
3614 int32_t *p_frame_number_valid =
3615 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3616 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3617 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003618 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003619 int32_t *p_urgent_frame_number_valid =
3620 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3621 uint32_t *p_urgent_frame_number =
3622 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3623 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3624 metadata) {
3625 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3626 *p_frame_number_valid, *p_frame_number);
3627 }
3628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003629 camera_metadata_t *resultMetadata = nullptr;
3630
Thierry Strudel3d639192016-09-09 11:52:26 -07003631 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3632 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3633 LOGE("Invalid metadata");
3634 if (free_and_bufdone_meta_buf) {
3635 mMetadataChannel->bufDone(metadata_buf);
3636 free(metadata_buf);
3637 }
3638 goto done_metadata;
3639 }
3640 frame_number_valid = *p_frame_number_valid;
3641 frame_number = *p_frame_number;
3642 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003643 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003644 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3645 urgent_frame_number = *p_urgent_frame_number;
3646 currentSysTime = systemTime(CLOCK_MONOTONIC);
3647
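    // When the sensor timestamp is not calibrated to the monotonic clock,
    // estimate the BOOTTIME-to-MONOTONIC offset: sample CLOCK_MONOTONIC on both
    // sides of a CLOCK_BOOTTIME read, keep the measurement with the smallest
    // gap as the most reliable one, and subtract that offset so capture_time
    // ends up on the monotonic time base.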
Jason Lee603176d2017-05-31 11:43:27 -07003648 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3649 const int tries = 3;
3650 nsecs_t bestGap, measured;
3651 for (int i = 0; i < tries; ++i) {
3652 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3653 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3654 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3655 const nsecs_t gap = tmono2 - tmono;
3656 if (i == 0 || gap < bestGap) {
3657 bestGap = gap;
3658 measured = tbase - ((tmono + tmono2) >> 1);
3659 }
3660 }
3661 capture_time -= measured;
3662 }
3663
Thierry Strudel3d639192016-09-09 11:52:26 -07003664 // Detect if buffers from any requests are overdue
3665 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 int64_t timeout;
3667 {
3668 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3669 // If there is a pending HDR+ request, the following requests may be blocked until the
3670 // HDR+ request is done. So allow a longer timeout.
3671 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3672 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003673 if (timeout < mExpectedInflightDuration) {
3674 timeout = mExpectedInflightDuration;
3675 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676 }
3677
3678 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003679 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003680 assert(missed.stream->priv);
3681 if (missed.stream->priv) {
3682 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3683 assert(ch->mStreams[0]);
3684 if (ch->mStreams[0]) {
3685 LOGE("Cancel missing frame = %d, buffer = %p,"
3686 "stream type = %d, stream format = %d",
3687 req.frame_number, missed.buffer,
3688 ch->mStreams[0]->getMyType(), missed.stream->format);
3689 ch->timeoutFrame(req.frame_number);
3690 }
3691 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 }
3693 }
3694 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003695 //For the very first metadata callback, regardless of whether it contains a valid
3696 //frame number, send the partial metadata for the jumpstarting requests.
3697 //Note that this has to be done even if the metadata doesn't contain a valid
3698 //urgent frame number, because when only 1 request is ever submitted
3699 //to the HAL, there won't be a subsequent valid urgent frame number.
3700 if (mFirstMetadataCallback) {
3701 for (pendingRequestIterator i =
3702 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3703 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003704 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3705 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003706 }
3707 }
3708 mFirstMetadataCallback = false;
3709 }
3710
Thierry Strudel3d639192016-09-09 11:52:26 -07003711 //Partial result on process_capture_result for timestamp
3712 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003713 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003714
3715 //Received an urgent Frame Number, handle it
3716 //using partial results
3717 for (pendingRequestIterator i =
3718 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3719 LOGD("Iterator Frame = %d urgent frame = %d",
3720 i->frame_number, urgent_frame_number);
3721
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003722 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003723 (i->partial_result_cnt == 0)) {
3724 LOGE("Error: HAL missed urgent metadata for frame number %d",
3725 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003726 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003727 }
3728
3729 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003730 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003731 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3732 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003733 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3734 // Instant AEC settled for this frame.
3735 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3736 mInstantAECSettledFrameNumber = urgent_frame_number;
3737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 break;
3739 }
3740 }
3741 }
3742
3743 if (!frame_number_valid) {
3744 LOGD("Not a valid normal frame number, used as SOF only");
3745 if (free_and_bufdone_meta_buf) {
3746 mMetadataChannel->bufDone(metadata_buf);
3747 free(metadata_buf);
3748 }
3749 goto done_metadata;
3750 }
3751 LOGH("valid frame_number = %u, capture_time = %lld",
3752 frame_number, capture_time);
3753
Emilian Peev4e0fe952017-06-30 12:40:09 -07003754 handleDepthDataLocked(metadata->depth_data, frame_number,
3755 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003756
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 // Check whether any stream buffer corresponding to this is dropped or not
3758 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3759 // OR check if instant AEC is enabled, then drop frames until AEC is settled.
3760 for (auto & pendingRequest : mPendingRequestsList) {
3761 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3762 mInstantAECSettledFrameNumber)) {
3763 camera3_notify_msg_t notify_msg = {};
3764 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003765 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 QCamera3ProcessingChannel *channel =
3767 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003768 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003769 if (p_cam_frame_drop) {
3770 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003771 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003772 // Got the stream ID for drop frame.
3773 dropFrame = true;
3774 break;
3775 }
3776 }
3777 } else {
3778 // This is instant AEC case.
3779 // For instant AEC, drop the stream until AEC is settled.
3780 dropFrame = true;
3781 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (dropFrame) {
3784 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3785 if (p_cam_frame_drop) {
3786 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003787 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003789 } else {
3790 // For instant AEC, inform frame drop and frame number
3791 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3792 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003793 pendingRequest.frame_number, streamID,
3794 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003795 }
3796 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003800 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003801 if (p_cam_frame_drop) {
3802 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003803 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003805 } else {
3806 // For instant AEC, inform frame drop and frame number
3807 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3808 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003809 pendingRequest.frame_number, streamID,
3810 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003811 }
3812 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003814 PendingFrameDrop.stream_ID = streamID;
3815 // Add the Frame drop info to mPendingFrameDropList
3816 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 }
3819 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003821
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003822 for (auto & pendingRequest : mPendingRequestsList) {
3823 // Find the pending request with the frame number.
3824 if (pendingRequest.frame_number == frame_number) {
3825 // Update the sensor timestamp.
3826 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003827
Thierry Strudel3d639192016-09-09 11:52:26 -07003828
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003829 /* Set the timestamp in the display metadata so that clients aware of
3830 private_handle, such as VT, can use this unmodified timestamp.
3831 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003832 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003833
Thierry Strudel3d639192016-09-09 11:52:26 -07003834 // Find the channel requiring metadata, meaning internal offline postprocessing
3835 // is needed.
3836 //TODO: for now, we don't support two streams requiring metadata at the same time
3837 // (because we are not making copies, and the metadata buffer is not reference counted).
3838 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003839 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3840 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003841 if (iter->need_metadata) {
3842 internalPproc = true;
3843 QCamera3ProcessingChannel *channel =
3844 (QCamera3ProcessingChannel *)iter->stream->priv;
3845 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003846 if(p_is_metabuf_queued != NULL) {
3847 *p_is_metabuf_queued = true;
3848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003849 break;
3850 }
3851 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003852 for (auto itr = pendingRequest.internalRequestList.begin();
3853 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003854 if (itr->need_metadata) {
3855 internalPproc = true;
3856 QCamera3ProcessingChannel *channel =
3857 (QCamera3ProcessingChannel *)itr->stream->priv;
3858 channel->queueReprocMetadata(metadata_buf);
3859 break;
3860 }
3861 }
3862
Thierry Strudel54dc9782017-02-15 12:12:10 -08003863 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003864
3865 bool *enableZsl = nullptr;
3866 if (gExposeEnableZslKey) {
3867 enableZsl = &pendingRequest.enableZsl;
3868 }
3869
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003870 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003871 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003872 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003873
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003874 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003876 if (pendingRequest.blob_request) {
3877 //Dump tuning metadata if enabled and available
3878 char prop[PROPERTY_VALUE_MAX];
3879 memset(prop, 0, sizeof(prop));
3880 property_get("persist.camera.dumpmetadata", prop, "0");
3881 int32_t enabled = atoi(prop);
3882 if (enabled && metadata->is_tuning_params_valid) {
3883 dumpMetadataToFile(metadata->tuning_params,
3884 mMetaFrameCount,
3885 enabled,
3886 "Snapshot",
3887 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003888 }
3889 }
3890
3891 if (!internalPproc) {
3892 LOGD("couldn't find need_metadata for this metadata");
3893 // Return metadata buffer
3894 if (free_and_bufdone_meta_buf) {
3895 mMetadataChannel->bufDone(metadata_buf);
3896 free(metadata_buf);
3897 }
3898 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003899
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003900 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003901 }
3902 }
3903
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003904 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3905
3906 // Try to send out capture result metadata.
3907 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003908 return;
3909
Thierry Strudel3d639192016-09-09 11:52:26 -07003910done_metadata:
3911 for (pendingRequestIterator i = mPendingRequestsList.begin();
3912 i != mPendingRequestsList.end() ;i++) {
3913 i->pipeline_depth++;
3914 }
3915 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3916 unblockRequestIfNecessary();
3917}
3918
3919/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003920 * FUNCTION : handleDepthDataLocked
3921 *
3922 * DESCRIPTION: Handles incoming depth data
3923 *
3924 * PARAMETERS : @depthData : Depth data
3925 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003926 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003927 *
3928 * RETURN :
3929 *
3930 *==========================================================================*/
3931void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003932 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003933 uint32_t currentFrameNumber;
3934 buffer_handle_t *depthBuffer;
3935
3936 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003937 return;
3938 }
3939
3940 camera3_stream_buffer_t resultBuffer =
3941 {.acquire_fence = -1,
3942 .release_fence = -1,
3943 .status = CAMERA3_BUFFER_STATUS_OK,
3944 .buffer = nullptr,
3945 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003946 do {
3947 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3948 if (nullptr == depthBuffer) {
3949 break;
3950 }
3951
Emilian Peev7650c122017-01-19 08:24:33 -08003952 resultBuffer.buffer = depthBuffer;
3953 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003954 if (valid) {
3955 int32_t rc = mDepthChannel->populateDepthData(depthData,
3956 frameNumber);
3957 if (NO_ERROR != rc) {
3958 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3959 } else {
3960 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3961 }
Emilian Peev7650c122017-01-19 08:24:33 -08003962 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003963 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003964 }
3965 } else if (currentFrameNumber > frameNumber) {
3966 break;
3967 } else {
3968 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3969 {{currentFrameNumber, mDepthChannel->getStream(),
3970 CAMERA3_MSG_ERROR_BUFFER}}};
3971 orchestrateNotify(&notify_msg);
3972
3973 LOGE("Depth buffer for frame number: %d is missing "
3974 "returning back!", currentFrameNumber);
3975 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3976 }
3977 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003978 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003979 } while (currentFrameNumber < frameNumber);
3980}
3981
3982/*===========================================================================
3983 * FUNCTION : notifyErrorFoPendingDepthData
3984 *
3985 * DESCRIPTION: Returns error for any pending depth buffers
3986 *
3987 * PARAMETERS : depthCh - depth channel that needs to get flushed
3988 *
3989 * RETURN :
3990 *
3991 *==========================================================================*/
3992void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3993 QCamera3DepthChannel *depthCh) {
3994 uint32_t currentFrameNumber;
3995 buffer_handle_t *depthBuffer;
3996
3997 if (nullptr == depthCh) {
3998 return;
3999 }
4000
4001 camera3_notify_msg_t notify_msg =
4002 {.type = CAMERA3_MSG_ERROR,
4003 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4004 camera3_stream_buffer_t resultBuffer =
4005 {.acquire_fence = -1,
4006 .release_fence = -1,
4007 .buffer = nullptr,
4008 .stream = depthCh->getStream(),
4009 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004010
4011 while (nullptr !=
4012 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4013 depthCh->unmapBuffer(currentFrameNumber);
4014
4015 notify_msg.message.error.frame_number = currentFrameNumber;
4016 orchestrateNotify(&notify_msg);
4017
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004018 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004019 };
4020}
4021
4022/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004023 * FUNCTION : hdrPlusPerfLock
4024 *
4025 * DESCRIPTION: perf lock for HDR+ using custom intent
4026 *
4027 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4028 *
4029 * RETURN : None
4030 *
4031 *==========================================================================*/
4032void QCamera3HardwareInterface::hdrPlusPerfLock(
4033 mm_camera_super_buf_t *metadata_buf)
4034{
4035 if (NULL == metadata_buf) {
4036 LOGE("metadata_buf is NULL");
4037 return;
4038 }
4039 metadata_buffer_t *metadata =
4040 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4041 int32_t *p_frame_number_valid =
4042 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4043 uint32_t *p_frame_number =
4044 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4045
4046 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4047 LOGE("%s: Invalid metadata", __func__);
4048 return;
4049 }
4050
Wei Wang01385482017-08-03 10:49:34 -07004051 //acquire perf lock for 2 secs after the last HDR frame is captured
4052 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004053 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4054 if ((p_frame_number != NULL) &&
4055 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004056 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004057 }
4058 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004059}
4060
4061/*===========================================================================
4062 * FUNCTION : handleInputBufferWithLock
4063 *
4064 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4065 *
4066 * PARAMETERS : @frame_number: frame number of the input buffer
4067 *
4068 * RETURN :
4069 *
4070 *==========================================================================*/
4071void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4072{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004073 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 pendingRequestIterator i = mPendingRequestsList.begin();
4075 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4076 i++;
4077 }
4078 if (i != mPendingRequestsList.end() && i->input_buffer) {
4079 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004080 CameraMetadata settings;
4081 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4082 if(i->settings) {
4083 settings = i->settings;
4084 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4085 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004086 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004087 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004089 } else {
4090 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004091 }
4092
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004093 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4094 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4095 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004096
4097 camera3_capture_result result;
4098 memset(&result, 0, sizeof(camera3_capture_result));
4099 result.frame_number = frame_number;
4100 result.result = i->settings;
4101 result.input_buffer = i->input_buffer;
4102 result.partial_result = PARTIAL_RESULT_COUNT;
4103
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004104 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 LOGD("Input request metadata and input buffer frame_number = %u",
4106 i->frame_number);
4107 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004108
4109 // Dispatch result metadata that may be just unblocked by this reprocess result.
4110 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 } else {
4112 LOGE("Could not find input request for frame number %d", frame_number);
4113 }
4114}
4115
4116/*===========================================================================
4117 * FUNCTION : handleBufferWithLock
4118 *
4119 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4120 *
4121 * PARAMETERS : @buffer: image buffer for the callback
4122 * @frame_number: frame number of the image buffer
4123 *
4124 * RETURN :
4125 *
4126 *==========================================================================*/
4127void QCamera3HardwareInterface::handleBufferWithLock(
4128 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4129{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004130 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004131
4132 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4133 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4134 }
4135
Thierry Strudel3d639192016-09-09 11:52:26 -07004136 /* Nothing to be done during error state */
4137 if ((ERROR == mState) || (DEINIT == mState)) {
4138 return;
4139 }
4140 if (mFlushPerf) {
4141 handleBuffersDuringFlushLock(buffer);
4142 return;
4143 }
4144 //not in flush
4145 // If the frame number doesn't exist in the pending request list,
4146 // directly send the buffer to the framework and update the pending buffers map.
4147 // Otherwise, book-keep the buffer.
4148 pendingRequestIterator i = mPendingRequestsList.begin();
4149 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4150 i++;
4151 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004152
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004153 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004154 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004155 // For a reprocessing request, try to send out result metadata.
4156 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004158 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004160 // Check if this frame was dropped.
4161 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4162 m != mPendingFrameDropList.end(); m++) {
4163 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4164 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4165 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4166 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4167 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4168 frame_number, streamID);
4169 m = mPendingFrameDropList.erase(m);
4170 break;
4171 }
4172 }
4173
4174 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4175 LOGH("result frame_number = %d, buffer = %p",
4176 frame_number, buffer->buffer);
4177
4178 mPendingBuffersMap.removeBuf(buffer->buffer);
4179 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4180
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004181 if (mPreviewStarted == false) {
4182 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4183 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004184 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4185
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004186 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4187 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4188 mPreviewStarted = true;
4189
4190 // Set power hint for preview
4191 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4192 }
4193 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004194}
4195
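/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Attaches result metadata to the matching pending request,
 * updates the partial result count based on the request type (HDR+,
 * reprocess, or live), strips the lens shading map if it was not requested,
 * and then dispatches any results that are now ready. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the pending request
 * @resultMetadata: translated result metadata; may be nullptr for a
 * reprocessing request, whose result reuses the request settings
 *
 * RETURN :
 *
 *==========================================================================*/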
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004196void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004197 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004198{
4199 // Find the pending request for this result metadata.
4200 auto requestIter = mPendingRequestsList.begin();
4201 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4202 requestIter++;
4203 }
4204
4205 if (requestIter == mPendingRequestsList.end()) {
4206 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4207 return;
4208 }
4209
4210 // Update the result metadata
4211 requestIter->resultMetadata = resultMetadata;
4212
4213 // Check what type of request this is.
4214 bool liveRequest = false;
4215 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004216 // HDR+ request doesn't have partial results.
4217 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 } else if (requestIter->input_buffer != nullptr) {
4219 // Reprocessing request result is the same as settings.
4220 requestIter->resultMetadata = requestIter->settings;
4221 // Reprocessing request doesn't have partial results.
4222 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4223 } else {
4224 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004225 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004226 mPendingLiveRequest--;
4227
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004228 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004229 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004230 // For a live request, send the metadata to HDR+ client.
4231 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4232 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4233 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4234 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004235 }
4236 }
4237
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004238 // Remove the lens shading map if it was not requested.
4239 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4240 CameraMetadata metadata;
4241 metadata.acquire(resultMetadata);
4242 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4243 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4244 &requestIter->requestedLensShadingMapMode, 1);
4245
4246 requestIter->resultMetadata = metadata.release();
4247 }
4248
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004249 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4250}
4251
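/*===========================================================================
 * FUNCTION : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks mPendingRequestsList in frame-number order and sends out
 * every result whose metadata is ready. When triggered by a live request, it
 * also notifies CAMERA3_MSG_ERROR_RESULT for earlier live requests that are
 * still missing metadata and bumps the pipeline depth of later pending
 * requests. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number whose result metadata triggered
 * this dispatch
 * @isLiveRequest: true if the triggering request is a live (non-HDR+,
 * non-reprocess) request
 *
 * RETURN :
 *
 *==========================================================================*/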
4252void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4253 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004254 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4255 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004256 bool readyToSend = true;
4257
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004258 // Iterate through the pending requests to send out result metadata that are ready. Also if
4259 // this result metadata belongs to a live request, notify errors for previous live requests
4260 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004261 auto iter = mPendingRequestsList.begin();
4262 while (iter != mPendingRequestsList.end()) {
4263 // Check if current pending request is ready. If it's not ready, the following pending
4264 // requests are also not ready.
4265 if (readyToSend && iter->resultMetadata == nullptr) {
4266 readyToSend = false;
4267 }
4268
4269 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4270
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004271 camera3_capture_result_t result = {};
4272 result.frame_number = iter->frame_number;
4273 result.result = iter->resultMetadata;
4274 result.partial_result = iter->partial_result_cnt;
4275
4276 // If this pending buffer has result metadata, we may be able to send out shutter callback
4277 // and result metadata.
4278 if (iter->resultMetadata != nullptr) {
4279 if (!readyToSend) {
4280 // If any of the previous pending requests is not ready, this pending request is
4281 // also not ready to send, so that shutter callbacks and result metadata
4282 // stay in order.
4283 iter++;
4284 continue;
4285 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004286 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004287 // If the result metadata belongs to a live request, notify errors for previous pending
4288 // live requests.
4289 mPendingLiveRequest--;
4290
4291 CameraMetadata dummyMetadata;
4292 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4293 result.result = dummyMetadata.release();
4294
4295 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004296
4297 // partial_result should be PARTIAL_RESULT_COUNT in case of
4298 // ERROR_RESULT.
4299 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4300 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004301 } else {
4302 iter++;
4303 continue;
4304 }
4305
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004306 result.output_buffers = nullptr;
4307 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004308 orchestrateResult(&result);
4309
4310 // For reprocessing, result metadata is the same as settings so do not free it here to
4311 // avoid double free.
4312 if (result.result != iter->settings) {
4313 free_camera_metadata((camera_metadata_t *)result.result);
4314 }
4315 iter->resultMetadata = nullptr;
4316 iter = erasePendingRequest(iter);
4317 }
4318
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 for (auto &iter : mPendingRequestsList) {
4321 // Increment pipeline depth for the following pending requests.
4322 if (iter.frame_number > frameNumber) {
4323 iter.pipeline_depth++;
4324 }
4325 }
4326 }
4327
4328 unblockRequestIfNecessary();
4329}
4330
Thierry Strudel3d639192016-09-09 11:52:26 -07004331/*===========================================================================
4332 * FUNCTION : unblockRequestIfNecessary
4333 *
4334 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4335 * that mMutex is held when this function is called.
4336 *
4337 * PARAMETERS :
4338 *
4339 * RETURN :
4340 *
4341 *==========================================================================*/
4342void QCamera3HardwareInterface::unblockRequestIfNecessary()
4343{
4344 // Unblock process_capture_request
4345 pthread_cond_signal(&mRequestCond);
4346}
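/*
 * Illustrative sketch (not part of the HAL build): the waiting side that the
 * signal above unblocks. processCaptureRequest is expected to wait on
 * mRequestCond under mMutex until in-flight requests drain; the exact
 * predicate below is a simplified assumption, not the HAL's literal logic.
 *
 *   pthread_mutex_lock(&mMutex);
 *   while (mPendingLiveRequest >= mMaxInFlightRequests) {
 *       pthread_cond_wait(&mRequestCond, &mMutex);
 *   }
 *   pthread_mutex_unlock(&mMutex);
 */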
4347
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004348/*===========================================================================
4349 * FUNCTION : isHdrSnapshotRequest
4350 *
4351 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4352 *
4353 * PARAMETERS : camera3 request structure
4354 *
4355 * RETURN : boolean decision variable
4356 *
4357 *==========================================================================*/
4358bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4359{
4360 if (request == NULL) {
4361 LOGE("Invalid request handle");
4362 assert(0);
4363 return false;
4364 }
4365
4366 if (!mForceHdrSnapshot) {
4367 CameraMetadata frame_settings;
4368 frame_settings = request->settings;
4369
4370 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4371 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4372 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4373 return false;
4374 }
4375 } else {
4376 return false;
4377 }
4378
4379 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4380 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4381 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4382 return false;
4383 }
4384 } else {
4385 return false;
4386 }
4387 }
4388
4389 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4390 if (request->output_buffers[i].stream->format
4391 == HAL_PIXEL_FORMAT_BLOB) {
4392 return true;
4393 }
4394 }
4395
4396 return false;
4397}
4398/*===========================================================================
4399 * FUNCTION : orchestrateRequest
4400 *
4401 * DESCRIPTION: Orchestrates a capture request from camera service
4402 *
4403 * PARAMETERS :
4404 * @request : request from framework to process
4405 *
4406 * RETURN : Error status codes
4407 *
4408 *==========================================================================*/
4409int32_t QCamera3HardwareInterface::orchestrateRequest(
4410 camera3_capture_request_t *request)
4411{
4412
4413 uint32_t originalFrameNumber = request->frame_number;
4414 uint32_t originalOutputCount = request->num_output_buffers;
4415 const camera_metadata_t *original_settings = request->settings;
4416 List<InternalRequest> internallyRequestedStreams;
4417 List<InternalRequest> emptyInternalList;
4418
4419 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4420 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4421 uint32_t internalFrameNumber;
4422 CameraMetadata modified_meta;
4423
4424
4425 /* Add Blob channel to list of internally requested streams */
4426 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4427 if (request->output_buffers[i].stream->format
4428 == HAL_PIXEL_FORMAT_BLOB) {
4429 InternalRequest streamRequested;
4430 streamRequested.meteringOnly = 1;
4431 streamRequested.need_metadata = 0;
4432 streamRequested.stream = request->output_buffers[i].stream;
4433 internallyRequestedStreams.push_back(streamRequested);
4434 }
4435 }
4436 request->num_output_buffers = 0;
4437 auto itr = internallyRequestedStreams.begin();
4438
4439 /* Modify setting to set compensation */
4440 modified_meta = request->settings;
4441 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4442 uint8_t aeLock = 1;
4443 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4444 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4445 camera_metadata_t *modified_settings = modified_meta.release();
4446 request->settings = modified_settings;
4447
4448 /* Capture Settling & -2x frame */
4449 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4450 request->frame_number = internalFrameNumber;
4451 processCaptureRequest(request, internallyRequestedStreams);
4452
4453 request->num_output_buffers = originalOutputCount;
4454 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4455 request->frame_number = internalFrameNumber;
4456 processCaptureRequest(request, emptyInternalList);
4457 request->num_output_buffers = 0;
4458
4459 modified_meta = modified_settings;
4460 expCompensation = 0;
4461 aeLock = 1;
4462 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4463 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4464 modified_settings = modified_meta.release();
4465 request->settings = modified_settings;
4466
4467 /* Capture Settling & 0X frame */
4468
4469 itr = internallyRequestedStreams.begin();
4470 if (itr == internallyRequestedStreams.end()) {
4471 LOGE("Error Internally Requested Stream list is empty");
4472 assert(0);
4473 } else {
4474 itr->need_metadata = 0;
4475 itr->meteringOnly = 1;
4476 }
4477
4478 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4479 request->frame_number = internalFrameNumber;
4480 processCaptureRequest(request, internallyRequestedStreams);
4481
4482 itr = internallyRequestedStreams.begin();
4483 if (itr == internallyRequestedStreams.end()) {
4484 ALOGE("Error Internally Requested Stream list is empty");
4485 assert(0);
4486 } else {
4487 itr->need_metadata = 1;
4488 itr->meteringOnly = 0;
4489 }
4490
4491 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4492 request->frame_number = internalFrameNumber;
4493 processCaptureRequest(request, internallyRequestedStreams);
4494
4495 /* Capture 2X frame*/
4496 modified_meta = modified_settings;
4497 expCompensation = GB_HDR_2X_STEP_EV;
4498 aeLock = 1;
4499 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4500 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4501 modified_settings = modified_meta.release();
4502 request->settings = modified_settings;
4503
4504 itr = internallyRequestedStreams.begin();
4505 if (itr == internallyRequestedStreams.end()) {
4506 ALOGE("Error Internally Requested Stream list is empty");
4507 assert(0);
4508 } else {
4509 itr->need_metadata = 0;
4510 itr->meteringOnly = 1;
4511 }
4512 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4513 request->frame_number = internalFrameNumber;
4514 processCaptureRequest(request, internallyRequestedStreams);
4515
4516 itr = internallyRequestedStreams.begin();
4517 if (itr == internallyRequestedStreams.end()) {
4518 ALOGE("Error Internally Requested Stream list is empty");
4519 assert(0);
4520 } else {
4521 itr->need_metadata = 1;
4522 itr->meteringOnly = 0;
4523 }
4524
4525 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4526 request->frame_number = internalFrameNumber;
4527 processCaptureRequest(request, internallyRequestedStreams);
4528
4529
4530 /* Capture 2X on original streaming config*/
4531 internallyRequestedStreams.clear();
4532
4533 /* Restore original settings pointer */
4534 request->settings = original_settings;
4535 } else {
4536 uint32_t internalFrameNumber;
4537 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4538 request->frame_number = internalFrameNumber;
4539 return processCaptureRequest(request, internallyRequestedStreams);
4540 }
4541
4542 return NO_ERROR;
4543}
4544
4545/*===========================================================================
4546 * FUNCTION : orchestrateResult
4547 *
4548 * DESCRIPTION: Orchestrates a capture result to camera service
4549 *
4550 * PARAMETERS :
4551 * @result : capture result from the HAL to be sent to the framework
4552 *
4553 * RETURN :
4554 *
4555 *==========================================================================*/
4556void QCamera3HardwareInterface::orchestrateResult(
4557 camera3_capture_result_t *result)
4558{
4559 uint32_t frameworkFrameNumber;
4560 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4561 frameworkFrameNumber);
4562 if (rc != NO_ERROR) {
4563 LOGE("Cannot find translated frameworkFrameNumber");
4564 assert(0);
4565 } else {
4566 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004567 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004568 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004569 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004570 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4571 camera_metadata_entry_t entry;
4572 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4573 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004574 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004575 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4576 if (ret != OK)
4577 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004578 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004579 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004580 result->frame_number = frameworkFrameNumber;
4581 mCallbackOps->process_capture_result(mCallbackOps, result);
4582 }
4583 }
4584}
4585
4586/*===========================================================================
4587 * FUNCTION : orchestrateNotify
4588 *
4589 * DESCRIPTION: Orchestrates a notify to camera service
4590 *
4591 * PARAMETERS :
4592 * @request : request from framework to process
4593 *
4594 * RETURN :
4595 *
4596 *==========================================================================*/
4597void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4598{
4599 uint32_t frameworkFrameNumber;
4600 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004601 int32_t rc = NO_ERROR;
4602
4603 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004605
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004606 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004607 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4608 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4609 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004610 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004611 LOGE("Cannot find translated frameworkFrameNumber");
4612 assert(0);
4613 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004614 }
4615 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004616
4617 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4618 LOGD("Internal Request drop the notifyCb");
4619 } else {
4620 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4621 mCallbackOps->notify(mCallbackOps, notify_msg);
4622 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004623}
4624
4625/*===========================================================================
4626 * FUNCTION : FrameNumberRegistry
4627 *
4628 * DESCRIPTION: Constructor
4629 *
4630 * PARAMETERS :
4631 *
4632 * RETURN :
4633 *
4634 *==========================================================================*/
4635FrameNumberRegistry::FrameNumberRegistry()
4636{
4637 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4638}
4639
4640/*===========================================================================
4641 * FUNCTION : ~FrameNumberRegistry
4642 *
4643 * DESCRIPTION: Destructor
4644 *
4645 * PARAMETERS :
4646 *
4647 * RETURN :
4648 *
4649 *==========================================================================*/
4650FrameNumberRegistry::~FrameNumberRegistry()
4651{
4652}
4653
4654/*===========================================================================
4655 * FUNCTION : PurgeOldEntriesLocked
4656 *
4657 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4658 *
4659 * PARAMETERS :
4660 *
4661 * RETURN : NONE
4662 *
4663 *==========================================================================*/
4664void FrameNumberRegistry::purgeOldEntriesLocked()
4665{
4666 while (_register.begin() != _register.end()) {
4667 auto itr = _register.begin();
4668 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4669 _register.erase(itr);
4670 } else {
4671 return;
4672 }
4673 }
4674}
4675
4676/*===========================================================================
4677 * FUNCTION : allocStoreInternalFrameNumber
4678 *
4679 * DESCRIPTION: Method to record a framework request and associate a newly
4680 * generated internal request number with it
4681 *
4682 * PARAMETERS :
4683 * @fFrameNumber: Identifier given by framework
4684 * @internalFN : Output parameter which will have the newly generated internal
4685 * entry
4686 *
4687 * RETURN : Error code
4688 *
4689 *==========================================================================*/
4690int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4691 uint32_t &internalFrameNumber)
4692{
4693 Mutex::Autolock lock(mRegistryLock);
4694 internalFrameNumber = _nextFreeInternalNumber++;
4695 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4696 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4697 purgeOldEntriesLocked();
4698 return NO_ERROR;
4699}
4700
4701/*===========================================================================
4702 * FUNCTION : generateStoreInternalFrameNumber
4703 *
4704 * DESCRIPTION: Method to generate a new internal request number that is not
4705 * associated with any framework request
4706 *
4707 * PARAMETERS :
4708 * @internalFrame#: Output parameter which will hold the newly generated internal frame number
4709 *
4710 *
4711 * RETURN : Error code
4712 *
4713 *==========================================================================*/
4714int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4715{
4716 Mutex::Autolock lock(mRegistryLock);
4717 internalFrameNumber = _nextFreeInternalNumber++;
4718 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4719 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4720 purgeOldEntriesLocked();
4721 return NO_ERROR;
4722}
4723
4724/*===========================================================================
4725 * FUNCTION : getFrameworkFrameNumber
4726 *
4727 * DESCRIPTION: Method to query the framework framenumber given an internal #
4728 *
4729 * PARAMETERS :
4730 * @internalFrame#: Internal reference
4731 * @frameworkframenumber: Output parameter holding framework frame entry
4732 *
4733 * RETURN : Error code
4734 *
4735 *==========================================================================*/
4736int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4737 uint32_t &frameworkFrameNumber)
4738{
4739 Mutex::Autolock lock(mRegistryLock);
4740 auto itr = _register.find(internalFrameNumber);
4741 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004742 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004743 return -ENOENT;
4744 }
4745
4746 frameworkFrameNumber = itr->second;
4747 purgeOldEntriesLocked();
4748 return NO_ERROR;
4749}
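/*
 * Illustrative sketch (not part of the HAL build): how the orchestration code
 * is expected to use FrameNumberRegistry. The frame number and the local
 * registry instance below are hypothetical.
 *
 *   FrameNumberRegistry db;
 *   uint32_t internal1, internal2, frameworkFn;
 *
 *   // A framework request (frame number 10 here) gets a fresh internal
 *   // number that is mapped back to it.
 *   db.allocStoreInternalFrameNumber(10, internal1);
 *
 *   // An internally generated request maps to EMPTY_FRAMEWORK_FRAME_NUMBER,
 *   // so its results and notifies are dropped before reaching the framework.
 *   db.generateStoreInternalFrameNumber(internal2);
 *
 *   // On the result path the internal number is translated back.
 *   if (db.getFrameworkFrameNumber(internal1, frameworkFn) == NO_ERROR) {
 *       // frameworkFn == 10; patch result->frame_number before the callback.
 *   }
 */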
Thierry Strudel3d639192016-09-09 11:52:26 -07004750
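/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel's
 * stream info: dimensions, format, per-plane stride/scanline, and padding
 * derived from the frame length.
 *
 * PARAMETERS : @config : output pbcamera stream configuration
 * @pbStreamId : pbcamera stream ID to assign
 * @pbStreamFormat: pbcamera pixel format
 * @channel : channel that owns the stream
 * @streamIndex : index of the stream within the channel
 *
 * RETURN : OK on success, BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/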
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004751status_t QCamera3HardwareInterface::fillPbStreamConfig(
4752 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4753 QCamera3Channel *channel, uint32_t streamIndex) {
4754 if (config == nullptr) {
4755 LOGE("%s: config is null", __FUNCTION__);
4756 return BAD_VALUE;
4757 }
4758
4759 if (channel == nullptr) {
4760 LOGE("%s: channel is null", __FUNCTION__);
4761 return BAD_VALUE;
4762 }
4763
4764 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4765 if (stream == nullptr) {
4766 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4767 return NAME_NOT_FOUND;
4768 }
4769
4770 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4771 if (streamInfo == nullptr) {
4772 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4773 return NAME_NOT_FOUND;
4774 }
4775
4776 config->id = pbStreamId;
4777 config->image.width = streamInfo->dim.width;
4778 config->image.height = streamInfo->dim.height;
4779 config->image.padding = 0;
4780 config->image.format = pbStreamFormat;
4781
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004782 uint32_t totalPlaneSize = 0;
4783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784 // Fill plane information.
4785 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4786 pbcamera::PlaneConfiguration plane;
4787 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4788 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4789 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004790
4791 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004792 }
4793
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004794 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004795 return OK;
4796}
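/*
 * Illustrative sketch (not part of the HAL build): filling an HDR+ stream
 * configuration for the first stream of a channel. kPbStreamId,
 * kPbStreamFormat, and channel are hypothetical placeholders.
 *
 *   pbcamera::StreamConfiguration config = {};
 *   status_t err = fillPbStreamConfig(&config, kPbStreamId, kPbStreamFormat,
 *           channel, 0);
 *   if (err == OK) {
 *       // config.image now carries width/height, per-plane stride/scanline,
 *       // and the padding implied by the stream's frame length.
 *   }
 */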
4797
Thierry Strudel3d639192016-09-09 11:52:26 -07004798/*===========================================================================
4799 * FUNCTION : processCaptureRequest
4800 *
4801 * DESCRIPTION: process a capture request from camera service
4802 *
4803 * PARAMETERS :
4804 * @request : request from framework to process
4805 *
4806 * RETURN :
4807 *
4808 *==========================================================================*/
4809int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004810 camera3_capture_request_t *request,
4811 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004812{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004813 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 int rc = NO_ERROR;
4815 int32_t request_id;
4816 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 bool isVidBufRequested = false;
4818 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004819 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004820
4821 pthread_mutex_lock(&mMutex);
4822
4823 // Validate current state
4824 switch (mState) {
4825 case CONFIGURED:
4826 case STARTED:
4827 /* valid state */
4828 break;
4829
4830 case ERROR:
4831 pthread_mutex_unlock(&mMutex);
4832 handleCameraDeviceError();
4833 return -ENODEV;
4834
4835 default:
4836 LOGE("Invalid state %d", mState);
4837 pthread_mutex_unlock(&mMutex);
4838 return -ENODEV;
4839 }
4840
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004841 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 if (rc != NO_ERROR) {
4843 LOGE("incoming request is not valid");
4844 pthread_mutex_unlock(&mMutex);
4845 return rc;
4846 }
4847
4848 meta = request->settings;
4849
4850 // For the first capture request, send the capture intent and
4851 // stream on all streams
4852 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004853 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 // send an unconfigure to the backend so that the isp
4855 // resources are deallocated
4856 if (!mFirstConfiguration) {
4857 cam_stream_size_info_t stream_config_info;
4858 int32_t hal_version = CAM_HAL_V3;
4859 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4860 stream_config_info.buffer_info.min_buffers =
4861 MIN_INFLIGHT_REQUESTS;
4862 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004863 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004864 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 clear_metadata_buffer(mParameters);
4866 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4867 CAM_INTF_PARM_HAL_VERSION, hal_version);
4868 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4869 CAM_INTF_META_STREAM_INFO, stream_config_info);
4870 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4871 mParameters);
4872 if (rc < 0) {
4873 LOGE("set_parms for unconfigure failed");
4874 pthread_mutex_unlock(&mMutex);
4875 return rc;
4876 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004877
Thierry Strudel3d639192016-09-09 11:52:26 -07004878 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004879 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004880 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 property_get("persist.camera.is_type", is_type_value, "4");
4884 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4885 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4886 property_get("persist.camera.is_type_preview", is_type_value, "4");
4887 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4888 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004889
4890 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4891 int32_t hal_version = CAM_HAL_V3;
4892 uint8_t captureIntent =
4893 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4894 mCaptureIntent = captureIntent;
4895 clear_metadata_buffer(mParameters);
4896 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4897 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4898 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004899 if (mFirstConfiguration) {
4900 // configure instant AEC
4901 // Instant AEC is a session based parameter and it is needed only
4902 // once per complete session after open camera.
4903 // i.e. This is set only once for the first capture request, after open camera.
4904 setInstantAEC(meta);
4905 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906 uint8_t fwkVideoStabMode=0;
4907 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4908 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4909 }
4910
Xue Tuecac74e2017-04-17 13:58:15 -07004911 // If EIS setprop is enabled then only turn it on for video/preview
4912 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004913 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 int32_t vsMode;
4915 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4917 rc = BAD_VALUE;
4918 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004919 LOGD("setEis %d", setEis);
4920 bool eis3Supported = false;
4921 size_t count = IS_TYPE_MAX;
4922 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4923 for (size_t i = 0; i < count; i++) {
4924 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4925 eis3Supported = true;
4926 break;
4927 }
4928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004929
4930 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004931 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4933 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004934 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4935 is_type = isTypePreview;
4936 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4937 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4938 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940 } else {
4941 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004943 } else {
4944 is_type = IS_TYPE_NONE;
4945 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004947 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004948 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4949 }
4950 }
4951
4952 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4953 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4954
Thierry Strudel54dc9782017-02-15 12:12:10 -08004955 //Disable tintless only if the property is set to 0
4956 memset(prop, 0, sizeof(prop));
4957 property_get("persist.camera.tintless.enable", prop, "1");
4958 int32_t tintless_value = atoi(prop);
4959
Thierry Strudel3d639192016-09-09 11:52:26 -07004960 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4961 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004962
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 //Disable CDS for HFR mode or if DIS/EIS is on.
4964 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4965 //after every configure_stream
4966 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4967 (m_bIsVideo)) {
4968 int32_t cds = CAM_CDS_MODE_OFF;
4969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4970 CAM_INTF_PARM_CDS_MODE, cds))
4971 LOGE("Failed to disable CDS for HFR mode");
4972
4973 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004974
4975 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4976 uint8_t* use_av_timer = NULL;
4977
4978 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004979 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 use_av_timer = &m_debug_avtimer;
4981 }
4982 else{
4983 use_av_timer =
4984 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004985 if (use_av_timer) {
4986 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4987 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004988 }
4989
4990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4991 rc = BAD_VALUE;
4992 }
4993 }
4994
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 setMobicat();
4996
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004997 uint8_t nrMode = 0;
4998 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4999 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5000 }
5001
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 /* Set fps and hfr mode while sending meta stream info so that sensor
5003 * can configure appropriate streaming mode */
5004 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5006 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5008 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009 if (rc == NO_ERROR) {
5010 int32_t max_fps =
5011 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005012 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005013 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5014 }
5015 /* For HFR, more buffers are dequeued upfront to improve the performance */
5016 if (mBatchSize) {
5017 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5018 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5019 }
5020 }
5021 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005022 LOGE("setHalFpsRange failed");
5023 }
5024 }
5025 if (meta.exists(ANDROID_CONTROL_MODE)) {
5026 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5027 rc = extractSceneMode(meta, metaMode, mParameters);
5028 if (rc != NO_ERROR) {
5029 LOGE("extractSceneMode failed");
5030 }
5031 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005032 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005033
Thierry Strudel04e026f2016-10-10 11:27:36 -07005034 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5035 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5036 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5037 rc = setVideoHdrMode(mParameters, vhdr);
5038 if (rc != NO_ERROR) {
5039 LOGE("setVideoHDR is failed");
5040 }
5041 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005042
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005043 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005044 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005045 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005046 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5048 sensorModeFullFov)) {
5049 rc = BAD_VALUE;
5050 }
5051 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005052 //TODO: validate the arguments, HSV scenemode should have only the
5053 //advertised fps ranges
5054
5055 /* Set the capture intent, HAL version, tintless, stream info,
5056 * and DIS enable parameters to the backend */
5057 LOGD("set_parms META_STREAM_INFO " );
5058 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005059 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5060 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005061 mStreamConfigInfo.type[i],
5062 mStreamConfigInfo.stream_sizes[i].width,
5063 mStreamConfigInfo.stream_sizes[i].height,
5064 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 mStreamConfigInfo.format[i],
5066 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005067 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005068
Thierry Strudel3d639192016-09-09 11:52:26 -07005069 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5070 mParameters);
5071 if (rc < 0) {
5072 LOGE("set_parms failed for hal version, stream info");
5073 }
5074
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005075 cam_sensor_mode_info_t sensorModeInfo = {};
5076 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 if (rc != NO_ERROR) {
5078 LOGE("Failed to get sensor mode info");
5079 pthread_mutex_unlock(&mMutex);
5080 goto error_exit;
5081 }
5082
5083 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5084 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005085 sensorModeInfo.active_array_size.width,
5086 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005087
5088 /* Set batchmode before initializing channel. Since registerBuffer
5089 * internally initializes some of the channels, better set batchmode
5090 * even before first register buffer */
5091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5092 it != mStreamInfo.end(); it++) {
5093 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5094 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5095 && mBatchSize) {
5096 rc = channel->setBatchSize(mBatchSize);
5097 //Disable per frame map unmap for HFR/batchmode case
5098 rc |= channel->setPerFrameMapUnmap(false);
5099 if (NO_ERROR != rc) {
5100 LOGE("Channel init failed %d", rc);
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105 }
5106
5107 //First initialize all streams
5108 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5109 it != mStreamInfo.end(); it++) {
5110 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005111
5112 /* Initial value of NR mode is needed before stream on */
5113 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5115 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 setEis) {
5117 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5118 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5119 is_type = mStreamConfigInfo.is_type[i];
5120 break;
5121 }
5122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005124 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005125 rc = channel->initialize(IS_TYPE_NONE);
5126 }
5127 if (NO_ERROR != rc) {
5128 LOGE("Channel initialization failed %d", rc);
5129 pthread_mutex_unlock(&mMutex);
5130 goto error_exit;
5131 }
5132 }
5133
5134 if (mRawDumpChannel) {
5135 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5136 if (rc != NO_ERROR) {
5137 LOGE("Error: Raw Dump Channel init failed");
5138 pthread_mutex_unlock(&mMutex);
5139 goto error_exit;
5140 }
5141 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005142 if (mHdrPlusRawSrcChannel) {
5143 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5144 if (rc != NO_ERROR) {
5145 LOGE("Error: HDR+ RAW Source Channel init failed");
5146 pthread_mutex_unlock(&mMutex);
5147 goto error_exit;
5148 }
5149 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 if (mSupportChannel) {
5151 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5152 if (rc < 0) {
5153 LOGE("Support channel initialization failed");
5154 pthread_mutex_unlock(&mMutex);
5155 goto error_exit;
5156 }
5157 }
5158 if (mAnalysisChannel) {
5159 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5160 if (rc < 0) {
5161 LOGE("Analysis channel initialization failed");
5162 pthread_mutex_unlock(&mMutex);
5163 goto error_exit;
5164 }
5165 }
5166 if (mDummyBatchChannel) {
5167 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5168 if (rc < 0) {
5169 LOGE("mDummyBatchChannel setBatchSize failed");
5170 pthread_mutex_unlock(&mMutex);
5171 goto error_exit;
5172 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005173 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 if (rc < 0) {
5175 LOGE("mDummyBatchChannel initialization failed");
5176 pthread_mutex_unlock(&mMutex);
5177 goto error_exit;
5178 }
5179 }
5180
5181 // Set bundle info
5182 rc = setBundleInfo();
5183 if (rc < 0) {
5184 LOGE("setBundleInfo failed %d", rc);
5185 pthread_mutex_unlock(&mMutex);
5186 goto error_exit;
5187 }
5188
5189 //update settings from app here
5190 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5191 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5192 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5193 }
5194 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5195 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5196 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5197 }
5198 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5199 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5200 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5201
5202 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5203 (mLinkedCameraId != mCameraId) ) {
5204 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5205 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005206 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 goto error_exit;
5208 }
5209 }
5210
5211 // add bundle related cameras
5212 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5213 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005214 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5215 &m_pDualCamCmdPtr->bundle_info;
5216 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 if (mIsDeviceLinked)
5218 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5219 else
5220 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5221
5222 pthread_mutex_lock(&gCamLock);
5223
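        // Bail out if the linked camera does not have a valid session id yet.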
5224 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5225 LOGE("Dualcam: Invalid Session Id ");
5226 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005227 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 goto error_exit;
5229 }
5230
5231 if (mIsMainCamera == 1) {
5232 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5233 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005234 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005235 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236 // related session id should be session id of linked session
5237 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5238 } else {
5239 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5240 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005241 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005242 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5244 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005245 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005246 pthread_mutex_unlock(&gCamLock);
5247
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005248 rc = mCameraHandle->ops->set_dual_cam_cmd(
5249 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 if (rc < 0) {
5251 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005252 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 goto error_exit;
5254 }
5255 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 goto no_error;
5257error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005258 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 return rc;
5260no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005261 mWokenUpByDaemon = false;
5262 mPendingLiveRequest = 0;
5263 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005264 }
5265
5266 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005267 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268
5269 if (mFlushPerf) {
5270 //we cannot accept any requests during flush
5271 LOGE("process_capture_request cannot proceed during flush");
5272 pthread_mutex_unlock(&mMutex);
5273 return NO_ERROR; //should return an error
5274 }
5275
5276 if (meta.exists(ANDROID_REQUEST_ID)) {
5277 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5278 mCurrentRequestId = request_id;
5279 LOGD("Received request with id: %d", request_id);
5280 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5281 LOGE("Unable to find request id field, \
5282 & no previous id available");
5283 pthread_mutex_unlock(&mMutex);
5284 return NAME_NOT_FOUND;
5285 } else {
5286 LOGD("Re-using old request id");
5287 request_id = mCurrentRequestId;
5288 }
5289
5290 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5291 request->num_output_buffers,
5292 request->input_buffer,
5293 frameNumber);
5294 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005295 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005296 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005297 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 uint32_t snapshotStreamId = 0;
5299 for (size_t i = 0; i < request->num_output_buffers; i++) {
5300 const camera3_stream_buffer_t& output = request->output_buffers[i];
5301 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5302
Emilian Peev7650c122017-01-19 08:24:33 -08005303 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5304 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005305 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005306 blob_request = 1;
5307 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5308 }
5309
5310 if (output.acquire_fence != -1) {
5311 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5312 close(output.acquire_fence);
5313 if (rc != OK) {
5314 LOGE("sync wait failed %d", rc);
5315 pthread_mutex_unlock(&mMutex);
5316 return rc;
5317 }
5318 }
5319
Emilian Peev0f3c3162017-03-15 12:57:46 +00005320 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5321 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005322 depthRequestPresent = true;
5323 continue;
5324 }
5325
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005326 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005328
5329 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5330 isVidBufRequested = true;
5331 }
5332 }
5333
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005334 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5335 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5336 itr++) {
5337 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5338 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5339 channel->getStreamID(channel->getStreamTypeMask());
5340
5341 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5342 isVidBufRequested = true;
5343 }
5344 }
5345
Thierry Strudel3d639192016-09-09 11:52:26 -07005346 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005347 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005348 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 }
5350 if (blob_request && mRawDumpChannel) {
5351 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005352 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005353 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005354 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005355 }
5356
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005357 {
5358 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5359 // Request a RAW buffer if
5360 // 1. mHdrPlusRawSrcChannel is valid.
5361 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5362 // 3. There is no pending HDR+ request.
5363 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5364 mHdrPlusPendingRequests.size() == 0) {
5365 streamsArray.stream_request[streamsArray.num_streams].streamID =
5366 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5367 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5368 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005369 }
5370
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005371 //extract capture intent
5372 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5373 mCaptureIntent =
5374 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5375 }
5376
5377 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5378 mCacMode =
5379 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5380 }
5381
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005382 uint8_t requestedLensShadingMapMode;
5383 // Get the shading map mode.
5384 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5385 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5386 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5387 } else {
5388 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5389 }
5390
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005391 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005392 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005393
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005394 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005395 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005396 // If this request has a still capture intent, try to submit an HDR+ request.
5397 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5398 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5399 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5400 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005401 }
5402
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005403 if (hdrPlusRequest) {
5404 // For a HDR+ request, just set the frame parameters.
5405 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5406 if (rc < 0) {
5407 LOGE("fail to set frame parameters");
5408 pthread_mutex_unlock(&mMutex);
5409 return rc;
5410 }
5411 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 /* Parse the settings:
5413 * - For every request in NORMAL MODE
5414 * - For every request in HFR mode during preview only case
5415 * - For first request of every batch in HFR mode during video
5416 * recording. In batch mode the same settings, except the frame number,
5417 * are repeated in each request of the batch.
5418 */
5419 if (!mBatchSize ||
5420 (mBatchSize && !isVidBufRequested) ||
5421 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005422 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (rc < 0) {
5424 LOGE("fail to set frame parameters");
5425 pthread_mutex_unlock(&mMutex);
5426 return rc;
5427 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005428
5429 {
5430 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5431 // will be reported in result metadata.
5432 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5433 if (mHdrPlusModeEnabled) {
5434 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5435 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5436 }
5437 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005438 }
5439 /* For batchMode HFR, setFrameParameters is not called for every
5440 * request; only the frame number of the latest request is parsed.
5441 * Keep track of the first and last frame numbers in a batch so that
5442 * metadata for the frame numbers of the batch can be duplicated in
5443 * handleBatchMetadata */
5444 if (mBatchSize) {
5445 if (!mToBeQueuedVidBufs) {
5446 //start of the batch
5447 mFirstFrameNumberInBatch = request->frame_number;
5448 }
5449 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5450 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5451 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005452 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 return BAD_VALUE;
5454 }
5455 }
5456 if (mNeedSensorRestart) {
5457 /* Unlock the mutex as restartSensor waits on the channels to be
5458 * stopped, which in turn calls stream callback functions -
5459 * handleBufferWithLock and handleMetadataWithLock */
5460 pthread_mutex_unlock(&mMutex);
5461 rc = dynamicUpdateMetaStreamInfo();
5462 if (rc != NO_ERROR) {
5463 LOGE("Restarting the sensor failed");
5464 return BAD_VALUE;
5465 }
5466 mNeedSensorRestart = false;
5467 pthread_mutex_lock(&mMutex);
5468 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005469 if(mResetInstantAEC) {
5470 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5471 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5472 mResetInstantAEC = false;
5473 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005474 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005475 if (request->input_buffer->acquire_fence != -1) {
5476 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5477 close(request->input_buffer->acquire_fence);
5478 if (rc != OK) {
5479 LOGE("input buffer sync wait failed %d", rc);
5480 pthread_mutex_unlock(&mMutex);
5481 return rc;
5482 }
5483 }
5484 }
5485
5486 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5487 mLastCustIntentFrmNum = frameNumber;
5488 }
5489 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005491 pendingRequestIterator latestRequest;
5492 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005493 pendingRequest.num_buffers = depthRequestPresent ?
5494 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005495 pendingRequest.request_id = request_id;
5496 pendingRequest.blob_request = blob_request;
5497 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005498 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005499 if (request->input_buffer) {
5500 pendingRequest.input_buffer =
5501 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5502 *(pendingRequest.input_buffer) = *(request->input_buffer);
5503 pInputBuffer = pendingRequest.input_buffer;
5504 } else {
5505 pendingRequest.input_buffer = NULL;
5506 pInputBuffer = NULL;
5507 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005508 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005509
5510 pendingRequest.pipeline_depth = 0;
5511 pendingRequest.partial_result_cnt = 0;
5512 extractJpegMetadata(mCurJpegMeta, request);
5513 pendingRequest.jpegMetadata = mCurJpegMeta;
5514 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005515 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005516 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005517 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005518 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5519 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005520
Samuel Ha68ba5172016-12-15 18:41:12 -08005521 /* DevCamDebug metadata processCaptureRequest */
5522 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5523 mDevCamDebugMetaEnable =
5524 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5525 }
5526 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5527 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005528
5529 //extract CAC info
5530 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5531 mCacMode =
5532 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5533 }
5534 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005535 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005536 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5537 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005538
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005539 // extract enableZsl info
5540 if (gExposeEnableZslKey) {
5541 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5542 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5543 mZslEnabled = pendingRequest.enableZsl;
5544 } else {
5545 pendingRequest.enableZsl = mZslEnabled;
5546 }
5547 }
5548
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 PendingBuffersInRequest bufsForCurRequest;
5550 bufsForCurRequest.frame_number = frameNumber;
5551 // Mark current timestamp for the new request
5552 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005555 if (hdrPlusRequest) {
5556 // Save settings for this request.
5557 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5558 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5559
5560 // Add to pending HDR+ request queue.
5561 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5562 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5563
5564 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5565 }
5566
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005568 if ((request->output_buffers[i].stream->data_space ==
5569 HAL_DATASPACE_DEPTH) &&
5570 (HAL_PIXEL_FORMAT_BLOB ==
5571 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005572 continue;
5573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005574 RequestedBufferInfo requestedBuf;
5575 memset(&requestedBuf, 0, sizeof(requestedBuf));
5576 requestedBuf.stream = request->output_buffers[i].stream;
5577 requestedBuf.buffer = NULL;
5578 pendingRequest.buffers.push_back(requestedBuf);
5579
5580 // Add to buffer handle the pending buffers list
5581 PendingBufferInfo bufferInfo;
5582 bufferInfo.buffer = request->output_buffers[i].buffer;
5583 bufferInfo.stream = request->output_buffers[i].stream;
5584 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5585 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5586 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5587 frameNumber, bufferInfo.buffer,
5588 channel->getStreamTypeMask(), bufferInfo.stream->format);
5589 }
5590 // Add this request packet into mPendingBuffersMap
5591 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5592 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5593 mPendingBuffersMap.get_num_overall_buffers());
5594
5595 latestRequest = mPendingRequestsList.insert(
5596 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005597
5598 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5599 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005600 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005601 for (size_t i = 0; i < request->num_output_buffers; i++) {
5602 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5603 }
5604
Thierry Strudel3d639192016-09-09 11:52:26 -07005605 if(mFlush) {
5606 LOGI("mFlush is true");
5607 pthread_mutex_unlock(&mMutex);
5608 return NO_ERROR;
5609 }
5610
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005611 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5612 // channel.
5613 if (!hdrPlusRequest) {
5614 int indexUsed;
5615 // Notify metadata channel we receive a request
5616 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618 if(request->input_buffer != NULL){
5619 LOGD("Input request, frame_number %d", frameNumber);
5620 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5621 if (NO_ERROR != rc) {
5622 LOGE("fail to set reproc parameters");
5623 pthread_mutex_unlock(&mMutex);
5624 return rc;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 }
5627
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005628 // Call request on other streams
5629 uint32_t streams_need_metadata = 0;
5630 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5631 for (size_t i = 0; i < request->num_output_buffers; i++) {
5632 const camera3_stream_buffer_t& output = request->output_buffers[i];
5633 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5634
5635 if (channel == NULL) {
5636 LOGW("invalid channel pointer for stream");
5637 continue;
5638 }
5639
5640 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5641 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5642 output.buffer, request->input_buffer, frameNumber);
5643 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005645 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5646 if (rc < 0) {
5647 LOGE("Fail to request on picture channel");
5648 pthread_mutex_unlock(&mMutex);
5649 return rc;
5650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005652 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5653 assert(NULL != mDepthChannel);
5654 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005655
Emilian Peev7650c122017-01-19 08:24:33 -08005656 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5657 if (rc < 0) {
5658 LOGE("Fail to map on depth buffer");
5659 pthread_mutex_unlock(&mMutex);
5660 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005661 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005662 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005663 } else {
5664 LOGD("snapshot request with buffer %p, frame_number %d",
5665 output.buffer, frameNumber);
5666 if (!request->settings) {
5667 rc = channel->request(output.buffer, frameNumber,
5668 NULL, mPrevParameters, indexUsed);
5669 } else {
5670 rc = channel->request(output.buffer, frameNumber,
5671 NULL, mParameters, indexUsed);
5672 }
5673 if (rc < 0) {
5674 LOGE("Fail to request on picture channel");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005678
Emilian Peev7650c122017-01-19 08:24:33 -08005679 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5680 uint32_t j = 0;
5681 for (j = 0; j < streamsArray.num_streams; j++) {
5682 if (streamsArray.stream_request[j].streamID == streamId) {
5683 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5684 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5685 else
5686 streamsArray.stream_request[j].buf_index = indexUsed;
5687 break;
5688 }
5689 }
5690 if (j == streamsArray.num_streams) {
5691 LOGE("Did not find matching stream to update index");
5692 assert(0);
5693 }
5694
5695 pendingBufferIter->need_metadata = true;
5696 streams_need_metadata++;
5697 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005698 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005699 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
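                // A YUV buffer may need post-processing; the channel reports via
                // needMetadata whether a HAL metadata buffer is required for reprocessing.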
5700 bool needMetadata = false;
5701 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5702 rc = yuvChannel->request(output.buffer, frameNumber,
5703 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5704 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005705 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005706 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005710
5711 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5712 uint32_t j = 0;
5713 for (j = 0; j < streamsArray.num_streams; j++) {
5714 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005715 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5716 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5717 else
5718 streamsArray.stream_request[j].buf_index = indexUsed;
5719 break;
5720 }
5721 }
5722 if (j == streamsArray.num_streams) {
5723 LOGE("Did not find matching stream to update index");
5724 assert(0);
5725 }
5726
5727 pendingBufferIter->need_metadata = needMetadata;
5728 if (needMetadata)
5729 streams_need_metadata += 1;
5730 LOGD("calling YUV channel request, need_metadata is %d",
5731 needMetadata);
5732 } else {
5733 LOGD("request with buffer %p, frame_number %d",
5734 output.buffer, frameNumber);
5735
5736 rc = channel->request(output.buffer, frameNumber, indexUsed);
5737
5738 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5739 uint32_t j = 0;
5740 for (j = 0; j < streamsArray.num_streams; j++) {
5741 if (streamsArray.stream_request[j].streamID == streamId) {
5742 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5743 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5744 else
5745 streamsArray.stream_request[j].buf_index = indexUsed;
5746 break;
5747 }
5748 }
5749 if (j == streamsArray.num_streams) {
5750 LOGE("Did not find matching stream to update index");
5751 assert(0);
5752 }
5753
5754 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5755 && mBatchSize) {
5756 mToBeQueuedVidBufs++;
5757 if (mToBeQueuedVidBufs == mBatchSize) {
5758 channel->queueBatchBuf();
5759 }
5760 }
5761 if (rc < 0) {
5762 LOGE("request failed");
5763 pthread_mutex_unlock(&mMutex);
5764 return rc;
5765 }
5766 }
5767 pendingBufferIter++;
5768 }
5769
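        // Also issue requests for streams requested internally by the HAL; these are
        // not part of the framework request and may be metering-only captures.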
5770 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5771 itr++) {
5772 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5773
5774 if (channel == NULL) {
5775 LOGE("invalid channel pointer for stream");
5776 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005777 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 return BAD_VALUE;
5779 }
5780
5781 InternalRequest requestedStream;
5782 requestedStream = (*itr);
5783
5784
5785 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5786 LOGD("snapshot request internally input buffer %p, frame_number %d",
5787 request->input_buffer, frameNumber);
5788 if(request->input_buffer != NULL){
5789 rc = channel->request(NULL, frameNumber,
5790 pInputBuffer, &mReprocMeta, indexUsed, true,
5791 requestedStream.meteringOnly);
5792 if (rc < 0) {
5793 LOGE("Fail to request on picture channel");
5794 pthread_mutex_unlock(&mMutex);
5795 return rc;
5796 }
5797 } else {
5798 LOGD("snapshot request with frame_number %d", frameNumber);
5799 if (!request->settings) {
5800 rc = channel->request(NULL, frameNumber,
5801 NULL, mPrevParameters, indexUsed, true,
5802 requestedStream.meteringOnly);
5803 } else {
5804 rc = channel->request(NULL, frameNumber,
5805 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5806 }
5807 if (rc < 0) {
5808 LOGE("Fail to request on picture channel");
5809 pthread_mutex_unlock(&mMutex);
5810 return rc;
5811 }
5812
5813 if ((*itr).meteringOnly != 1) {
5814 requestedStream.need_metadata = 1;
5815 streams_need_metadata++;
5816 }
5817 }
5818
5819 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5820 uint32_t j = 0;
5821 for (j = 0; j < streamsArray.num_streams; j++) {
5822 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005823 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5824 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5825 else
5826 streamsArray.stream_request[j].buf_index = indexUsed;
5827 break;
5828 }
5829 }
5830 if (j == streamsArray.num_streams) {
5831 LOGE("Did not find matching stream to update index");
5832 assert(0);
5833 }
5834
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005835 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005837 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005838 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005839 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005840 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005841 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005842 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005843
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005844 //If 2 streams have need_metadata set to true, fail the request, unless
5845 //we copy/reference count the metadata buffer
5846 if (streams_need_metadata > 1) {
5847 LOGE("not supporting request in which two streams require"
5848 " 2 HAL metadata buffers for reprocessing");
5849 pthread_mutex_unlock(&mMutex);
5850 return -EINVAL;
5851 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005852
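    // Default PD data mode: skip PD data when a depth channel is configured, disable it
    // otherwise. When a depth buffer is requested, honor the per-request PD data enable
    // setting and fall back to the last selected mode if the request carries no settings.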
Emilian Peev656e4fa2017-06-02 16:47:04 +01005853 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5854 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5855 if (depthRequestPresent && mDepthChannel) {
5856 if (request->settings) {
5857 camera_metadata_ro_entry entry;
5858 if (find_camera_metadata_ro_entry(request->settings,
5859 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5860 if (entry.data.u8[0]) {
5861 pdafEnable = CAM_PD_DATA_ENABLED;
5862 } else {
5863 pdafEnable = CAM_PD_DATA_SKIP;
5864 }
5865 mDepthCloudMode = pdafEnable;
5866 } else {
5867 pdafEnable = mDepthCloudMode;
5868 }
5869 } else {
5870 pdafEnable = mDepthCloudMode;
5871 }
5872 }
5873
Emilian Peev7650c122017-01-19 08:24:33 -08005874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5875 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5876 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5877 pthread_mutex_unlock(&mMutex);
5878 return BAD_VALUE;
5879 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005880
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005881 if (request->input_buffer == NULL) {
5882 /* Set the parameters to backend:
5883 * - For every request in NORMAL MODE
5884 * - For every request in HFR mode during preview only case
5885 * - Once every batch in HFR mode during video recording
5886 */
5887 if (!mBatchSize ||
5888 (mBatchSize && !isVidBufRequested) ||
5889 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5890 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5891 mBatchSize, isVidBufRequested,
5892 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005893
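            // Merge this request's stream IDs into the accumulated batch list so the
            // set_parms call below covers every stream touched during the batch.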
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005894 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5895 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5896 uint32_t m = 0;
5897 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5898 if (streamsArray.stream_request[k].streamID ==
5899 mBatchedStreamsArray.stream_request[m].streamID)
5900 break;
5901 }
5902 if (m == mBatchedStreamsArray.num_streams) {
5903 mBatchedStreamsArray.stream_request\
5904 [mBatchedStreamsArray.num_streams].streamID =
5905 streamsArray.stream_request[k].streamID;
5906 mBatchedStreamsArray.stream_request\
5907 [mBatchedStreamsArray.num_streams].buf_index =
5908 streamsArray.stream_request[k].buf_index;
5909 mBatchedStreamsArray.num_streams =
5910 mBatchedStreamsArray.num_streams + 1;
5911 }
5912 }
5913 streamsArray = mBatchedStreamsArray;
5914 }
5915 /* Update stream id of all the requested buffers */
5916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5917 streamsArray)) {
5918 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005919 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005920 return BAD_VALUE;
5921 }
5922
5923 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5924 mParameters);
5925 if (rc < 0) {
5926 LOGE("set_parms failed");
5927 }
5928 /* reset to zero because the batch has been queued */
5929 mToBeQueuedVidBufs = 0;
5930 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5931 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5932 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
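            // Batch not yet complete: accumulate this request's stream IDs; set_parms
            // will be sent once the batch is full.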
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005933 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5934 uint32_t m = 0;
5935 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5936 if (streamsArray.stream_request[k].streamID ==
5937 mBatchedStreamsArray.stream_request[m].streamID)
5938 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005939 }
5940 if (m == mBatchedStreamsArray.num_streams) {
5941 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5942 streamID = streamsArray.stream_request[k].streamID;
5943 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5944 buf_index = streamsArray.stream_request[k].buf_index;
5945 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5946 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005947 }
5948 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005949 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005950
5951 // Start all streams after the first setting is sent, so that the
5952 // setting can be applied sooner: (0 + apply_delay)th frame.
5953 if (mState == CONFIGURED && mChannelHandle) {
5954 //Then start them.
5955 LOGH("Start META Channel");
5956 rc = mMetadataChannel->start();
5957 if (rc < 0) {
5958 LOGE("META channel start failed");
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
5962
5963 if (mAnalysisChannel) {
5964 rc = mAnalysisChannel->start();
5965 if (rc < 0) {
5966 LOGE("Analysis channel start failed");
5967 mMetadataChannel->stop();
5968 pthread_mutex_unlock(&mMutex);
5969 return rc;
5970 }
5971 }
5972
5973 if (mSupportChannel) {
5974 rc = mSupportChannel->start();
5975 if (rc < 0) {
5976 LOGE("Support channel start failed");
5977 mMetadataChannel->stop();
5978 /* Although support and analysis are mutually exclusive today,
5979 handle it in any case for future proofing */
5980 if (mAnalysisChannel) {
5981 mAnalysisChannel->stop();
5982 }
5983 pthread_mutex_unlock(&mMutex);
5984 return rc;
5985 }
5986 }
5987 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5988 it != mStreamInfo.end(); it++) {
5989 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5990 LOGH("Start Processing Channel mask=%d",
5991 channel->getStreamTypeMask());
5992 rc = channel->start();
5993 if (rc < 0) {
5994 LOGE("channel start failed");
5995 pthread_mutex_unlock(&mMutex);
5996 return rc;
5997 }
5998 }
5999
6000 if (mRawDumpChannel) {
6001 LOGD("Starting raw dump stream");
6002 rc = mRawDumpChannel->start();
6003 if (rc != NO_ERROR) {
6004 LOGE("Error Starting Raw Dump Channel");
6005 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6006 it != mStreamInfo.end(); it++) {
6007 QCamera3Channel *channel =
6008 (QCamera3Channel *)(*it)->stream->priv;
6009 LOGH("Stopping Processing Channel mask=%d",
6010 channel->getStreamTypeMask());
6011 channel->stop();
6012 }
6013 if (mSupportChannel)
6014 mSupportChannel->stop();
6015 if (mAnalysisChannel) {
6016 mAnalysisChannel->stop();
6017 }
6018 mMetadataChannel->stop();
6019 pthread_mutex_unlock(&mMutex);
6020 return rc;
6021 }
6022 }
6023
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006024 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006025 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006026 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006027 if (rc != NO_ERROR) {
6028 LOGE("start_channel failed %d", rc);
6029 pthread_mutex_unlock(&mMutex);
6030 return rc;
6031 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006032
6033 {
6034 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006035 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006036
6037 // Now that sensor mode should have been selected, get the selected sensor mode
6038 // info.
6039 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6040 getCurrentSensorModeInfo(mSensorModeInfo);
6041
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006042 if (EaselManagerClientOpened) {
6043 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006044 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6045 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006046 if (rc != OK) {
6047 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6048 mCameraId, mSensorModeInfo.op_pixel_clk);
6049 pthread_mutex_unlock(&mMutex);
6050 return rc;
6051 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006052 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006053 }
6054 }
6055
6056 // Start sensor streaming.
6057 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6058 mChannelHandle);
6059 if (rc != NO_ERROR) {
6060 LOGE("start_sensor_stream_on failed %d", rc);
6061 pthread_mutex_unlock(&mMutex);
6062 return rc;
6063 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006064 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006065 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006066 }
6067
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006068 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006069 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006070 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006071 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006072 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6073 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6074 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6075 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006076
6077 if (isSessionHdrPlusModeCompatible()) {
6078 rc = enableHdrPlusModeLocked();
6079 if (rc != OK) {
6080 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006084 }
6085
6086 mFirstPreviewIntentSeen = true;
6087 }
6088 }
6089
Thierry Strudel3d639192016-09-09 11:52:26 -07006090 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6091
6092 mState = STARTED;
6093 // Added a timed condition wait
6094 struct timespec ts;
6095 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006096 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006097 if (rc < 0) {
6098 isValidTimeout = 0;
6099 LOGE("Error reading the monotonic clock!!");
6100 }
6101 else {
6102 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006103 int64_t timeout = 5;
6104 {
6105 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6106 // If there is a pending HDR+ request, the following requests may be blocked until the
6107 // HDR+ request is done. So allow a longer timeout.
6108 if (mHdrPlusPendingRequests.size() > 0) {
6109 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6110 }
6111 }
6112 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006113 }
6114 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006115 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006116 (mState != ERROR) && (mState != DEINIT)) {
6117 if (!isValidTimeout) {
6118 LOGD("Blocking on conditional wait");
6119 pthread_cond_wait(&mRequestCond, &mMutex);
6120 }
6121 else {
6122 LOGD("Blocking on timed conditional wait");
6123 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6124 if (rc == ETIMEDOUT) {
6125 rc = -ENODEV;
6126 LOGE("Unblocked on timeout!!!!");
6127 break;
6128 }
6129 }
6130 LOGD("Unblocked");
6131 if (mWokenUpByDaemon) {
6132 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006133 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 break;
6135 }
6136 }
6137 pthread_mutex_unlock(&mMutex);
6138
6139 return rc;
6140}
6141
6142/*===========================================================================
6143 * FUNCTION : dump
6144 *
6145 * DESCRIPTION: Dump HAL state (pending requests, buffers and frame drops) to fd
6146 *
6147 * PARAMETERS :
6148 * @fd : file descriptor to write the dump to
6149 *
6150 * RETURN :
6151 *==========================================================================*/
6152void QCamera3HardwareInterface::dump(int fd)
6153{
6154 pthread_mutex_lock(&mMutex);
6155 dprintf(fd, "\n Camera HAL3 information Begin \n");
6156
6157 dprintf(fd, "\nNumber of pending requests: %zu \n",
6158 mPendingRequestsList.size());
6159 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6160 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6161 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6162 for(pendingRequestIterator i = mPendingRequestsList.begin();
6163 i != mPendingRequestsList.end(); i++) {
6164 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6165 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6166 i->input_buffer);
6167 }
6168 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6169 mPendingBuffersMap.get_num_overall_buffers());
6170 dprintf(fd, "-------+------------------\n");
6171 dprintf(fd, " Frame | Stream type mask \n");
6172 dprintf(fd, "-------+------------------\n");
6173 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6174 for(auto &j : req.mPendingBufferList) {
6175 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6176 dprintf(fd, " %5d | %11d \n",
6177 req.frame_number, channel->getStreamTypeMask());
6178 }
6179 }
6180 dprintf(fd, "-------+------------------\n");
6181
6182 dprintf(fd, "\nPending frame drop list: %zu\n",
6183 mPendingFrameDropList.size());
6184 dprintf(fd, "-------+-----------\n");
6185 dprintf(fd, " Frame | Stream ID \n");
6186 dprintf(fd, "-------+-----------\n");
6187 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6188 i != mPendingFrameDropList.end(); i++) {
6189 dprintf(fd, " %5d | %9d \n",
6190 i->frame_number, i->stream_ID);
6191 }
6192 dprintf(fd, "-------+-----------\n");
6193
6194 dprintf(fd, "\n Camera HAL3 information End \n");
6195
6196 /* use dumpsys media.camera as trigger to send update debug level event */
6197 mUpdateDebugLevel = true;
6198 pthread_mutex_unlock(&mMutex);
6199 return;
6200}
6201
6202/*===========================================================================
6203 * FUNCTION : flush
6204 *
6205 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6206 * conditionally restarts channels
6207 *
6208 * PARAMETERS :
6209 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006210 * @ stopChannelImmediately: stop the channel immediately. This should be used
6211 * when the device has encountered an error and MIPI may have
6212 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006213 *
6214 * RETURN :
6215 * 0 on success
6216 * Error code on failure
6217 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006218int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006219{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006220 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 int32_t rc = NO_ERROR;
6222
6223 LOGD("Unblocking Process Capture Request");
6224 pthread_mutex_lock(&mMutex);
6225 mFlush = true;
6226 pthread_mutex_unlock(&mMutex);
6227
6228 rc = stopAllChannels();
6229 // unlink of dualcam
6230 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006231 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6232 &m_pDualCamCmdPtr->bundle_info;
6233 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006234 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6235 pthread_mutex_lock(&gCamLock);
6236
6237 if (mIsMainCamera == 1) {
6238 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6239 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006240 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 // related session id should be session id of linked session
6242 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6243 } else {
6244 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6245 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006246 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6248 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006249 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006250 pthread_mutex_unlock(&gCamLock);
6251
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006252 rc = mCameraHandle->ops->set_dual_cam_cmd(
6253 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006254 if (rc < 0) {
6255 LOGE("Dualcam: Unlink failed, but still proceed to close");
6256 }
6257 }
6258
6259 if (rc < 0) {
6260 LOGE("stopAllChannels failed");
6261 return rc;
6262 }
6263 if (mChannelHandle) {
6264 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006265 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006266 }
6267
6268 // Reset bundle info
6269 rc = setBundleInfo();
6270 if (rc < 0) {
6271 LOGE("setBundleInfo failed %d", rc);
6272 return rc;
6273 }
6274
6275 // Mutex Lock
6276 pthread_mutex_lock(&mMutex);
6277
6278 // Unblock process_capture_request
6279 mPendingLiveRequest = 0;
6280 pthread_cond_signal(&mRequestCond);
6281
6282 rc = notifyErrorForPendingRequests();
6283 if (rc < 0) {
6284 LOGE("notifyErrorForPendingRequests failed");
6285 pthread_mutex_unlock(&mMutex);
6286 return rc;
6287 }
6288
6289 mFlush = false;
6290
6291 // Start the Streams/Channels
6292 if (restartChannels) {
6293 rc = startAllChannels();
6294 if (rc < 0) {
6295 LOGE("startAllChannels failed");
6296 pthread_mutex_unlock(&mMutex);
6297 return rc;
6298 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006299 if (mChannelHandle) {
6300 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006301 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006302 if (rc < 0) {
6303 LOGE("start_channel failed");
6304 pthread_mutex_unlock(&mMutex);
6305 return rc;
6306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006307 }
6308 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006309 pthread_mutex_unlock(&mMutex);
6310
6311 return 0;
6312}
6313
6314/*===========================================================================
6315 * FUNCTION : flushPerf
6316 *
6317 * DESCRIPTION: This is the performance-optimized version of flush that does
6318 * not use stream off; instead it flushes the system
6319 *
6320 * PARAMETERS :
6321 *
6322 *
6323 * RETURN : 0 : success
6324 * -EINVAL: input is malformed (device is not valid)
6325 * -ENODEV: if the device has encountered a serious error
6326 *==========================================================================*/
6327int QCamera3HardwareInterface::flushPerf()
6328{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006329 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 int32_t rc = 0;
6331 struct timespec timeout;
6332 bool timed_wait = false;
6333
6334 pthread_mutex_lock(&mMutex);
6335 mFlushPerf = true;
6336 mPendingBuffersMap.numPendingBufsAtFlush =
6337 mPendingBuffersMap.get_num_overall_buffers();
6338 LOGD("Calling flush. Wait for %d buffers to return",
6339 mPendingBuffersMap.numPendingBufsAtFlush);
6340
6341 /* send the flush event to the backend */
6342 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6343 if (rc < 0) {
6344 LOGE("Error in flush: IOCTL failure");
6345 mFlushPerf = false;
6346 pthread_mutex_unlock(&mMutex);
6347 return -ENODEV;
6348 }
6349
6350 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6351 LOGD("No pending buffers in HAL, return flush");
6352 mFlushPerf = false;
6353 pthread_mutex_unlock(&mMutex);
6354 return rc;
6355 }
6356
6357 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006358 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006359 if (rc < 0) {
6360 LOGE("Error reading the monotonic clock, cannot use timed wait");
6361 } else {
6362 timeout.tv_sec += FLUSH_TIMEOUT;
6363 timed_wait = true;
6364 }
6365
6366 //Block on conditional variable
6367 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6368 LOGD("Waiting on mBuffersCond");
6369 if (!timed_wait) {
6370 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6371 if (rc != 0) {
6372 LOGE("pthread_cond_wait failed due to rc = %s",
6373 strerror(rc));
6374 break;
6375 }
6376 } else {
6377 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6378 if (rc != 0) {
6379 LOGE("pthread_cond_timedwait failed due to rc = %s",
6380 strerror(rc));
6381 break;
6382 }
6383 }
6384 }
6385 if (rc != 0) {
6386 mFlushPerf = false;
6387 pthread_mutex_unlock(&mMutex);
6388 return -ENODEV;
6389 }
6390
6391 LOGD("Received buffers, now safe to return them");
6392
6393 //make sure the channels handle flush
6394 //currently only required for the picture channel to release snapshot resources
6395 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6396 it != mStreamInfo.end(); it++) {
6397 QCamera3Channel *channel = (*it)->channel;
6398 if (channel) {
6399 rc = channel->flush();
6400 if (rc) {
6401 LOGE("Flushing the channels failed with error %d", rc);
6402 // Even though the channel flush failed, we need to continue and
6403 // return the buffers we have to the framework; however, the return
6404 // value will be an error
6405 rc = -ENODEV;
6406 }
6407 }
6408 }
6409
6410 /* notify the frameworks and send errored results */
6411 rc = notifyErrorForPendingRequests();
6412 if (rc < 0) {
6413 LOGE("notifyErrorForPendingRequests failed");
6414 pthread_mutex_unlock(&mMutex);
6415 return rc;
6416 }
6417
6418 //unblock process_capture_request
6419 mPendingLiveRequest = 0;
6420 unblockRequestIfNecessary();
6421
6422 mFlushPerf = false;
6423 pthread_mutex_unlock(&mMutex);
6424 LOGD ("Flush Operation complete. rc = %d", rc);
6425 return rc;
6426}
6427
6428/*===========================================================================
6429 * FUNCTION : handleCameraDeviceError
6430 *
6431 * DESCRIPTION: This function calls internal flush and notifies the error to
6432 * framework and updates the state variable.
6433 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006434 * PARAMETERS :
6435 * @stopChannelImmediately : stop channels immediately without waiting for
6436 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006437 *
6438 * RETURN : NO_ERROR on Success
6439 * Error code on failure
6440 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
{
    int32_t rc = NO_ERROR;

    {
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        pthread_mutex_unlock(&mMutex);

        rc = flush(false /* restart channels */, stopChannelImmediately);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }
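    // The fatal-error notification below is sent only after the internal flush has
    // drained pending buffers/results and the state has moved to DEINIT, so the
    // framework does not receive further results after being told the device is in
    // an unrecoverable error state.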

    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
6475
6476/*===========================================================================
6477 * FUNCTION : captureResultCb
6478 *
6479 * DESCRIPTION: Callback handler for all capture result
6480 * (streams, as well as metadata)
6481 *
6482 * PARAMETERS :
6483 * @metadata : metadata information
6484 * @buffer : actual gralloc buffer to be returned to frameworks.
6485 * NULL if metadata.
6486 *
6487 * RETURN : NONE
6488 *==========================================================================*/
6489void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6490 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6491{
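    // Locking overview: only mBatchSize is sampled under mMutex up front. The batch
    // path defers to handleBatchMetadata(), which is expected to manage its own
    // locking, while the non-batch metadata, input-buffer and output-buffer paths
    // hold mMutex across their respective *WithLock() handlers.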
6492 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006493 pthread_mutex_lock(&mMutex);
6494 uint8_t batchSize = mBatchSize;
6495 pthread_mutex_unlock(&mMutex);
6496 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006497 handleBatchMetadata(metadata_buf,
6498 true /* free_and_bufdone_meta_buf */);
6499 } else { /* mBatchSize = 0 */
6500 hdrPlusPerfLock(metadata_buf);
6501 pthread_mutex_lock(&mMutex);
6502 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006503 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006504 true /* last urgent frame of batch metadata */,
6505 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006506 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006507 pthread_mutex_unlock(&mMutex);
6508 }
6509 } else if (isInputBuffer) {
6510 pthread_mutex_lock(&mMutex);
6511 handleInputBufferWithLock(frame_number);
6512 pthread_mutex_unlock(&mMutex);
6513 } else {
6514 pthread_mutex_lock(&mMutex);
6515 handleBufferWithLock(buffer, frame_number);
6516 pthread_mutex_unlock(&mMutex);
6517 }
6518 return;
6519}
6520
6521/*===========================================================================
6522 * FUNCTION : getReprocessibleOutputStreamId
6523 *
6524 * DESCRIPTION: Get source output stream id for the input reprocess stream
6525 * based on size and format, which would be the largest
6526 * output stream if an input stream exists.
6527 *
6528 * PARAMETERS :
6529 * @id : return the stream id if found
6530 *
6531 * RETURN : int32_t type of status
6532 * NO_ERROR -- success
 *              non-zero failure code
6534 *==========================================================================*/
6535int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6536{
6537 /* check if any output or bidirectional stream with the same size and format
6538 and return that stream */
6539 if ((mInputStreamInfo.dim.width > 0) &&
6540 (mInputStreamInfo.dim.height > 0)) {
6541 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6542 it != mStreamInfo.end(); it++) {
6543
6544 camera3_stream_t *stream = (*it)->stream;
6545 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6546 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6547 (stream->format == mInputStreamInfo.format)) {
6548 // Usage flag for an input stream and the source output stream
6549 // may be different.
6550 LOGD("Found reprocessible output stream! %p", *it);
6551 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6552 stream->usage, mInputStreamInfo.usage);
6553
6554 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6555 if (channel != NULL && channel->mStreams[0]) {
6556 id = channel->mStreams[0]->getMyServerID();
6557 return NO_ERROR;
6558 }
6559 }
6560 }
6561 } else {
6562 LOGD("No input stream, so no reprocessible output stream");
6563 }
6564 return NAME_NOT_FOUND;
6565}
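// Illustrative (hypothetical) call-site sketch for the helper above:
//
//     uint32_t reprocStreamId;
//     if (getReprocessibleOutputStreamId(reprocStreamId) == NO_ERROR) {
//         // reprocStreamId is the server-side stream ID of the output stream whose
//         // buffers can be fed back through the reprocess (input) path.
//     }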
6566
6567/*===========================================================================
6568 * FUNCTION : lookupFwkName
6569 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
6572 *
6573 * PARAMETERS :
6574 * @arr : map between the two enums
6575 * @len : len of the map
6576 * @hal_name : name of the hal_parm to map
6577 *
6578 * RETURN : int type of status
6579 * fwk_name -- success
 *              non-zero failure code
6581 *==========================================================================*/
6582template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6583 size_t len, halType hal_name)
6584{
6585
6586 for (size_t i = 0; i < len; i++) {
6587 if (arr[i].hal_name == hal_name) {
6588 return arr[i].fwk_name;
6589 }
6590 }
6591
    /* Not finding a matching framework type is not necessarily
     * an error case. This happens when mm-camera supports more attributes
     * than the framework does */
6595 LOGH("Cannot find matching framework type");
6596 return NAME_NOT_FOUND;
6597}
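// Illustrative example, assuming EFFECT_MODES_MAP (used elsewhere in this file)
// contains an entry mapping CAM_EFFECT_MODE_MONO to ANDROID_CONTROL_EFFECT_MODE_MONO:
//
//     int val = lookupFwkName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), CAM_EFFECT_MODE_MONO);
//     // val == ANDROID_CONTROL_EFFECT_MODE_MONO; an unmapped HAL value would
//     // yield NAME_NOT_FOUND instead.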
6598
6599/*===========================================================================
6600 * FUNCTION : lookupHalName
6601 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
6604 *
6605 * PARAMETERS :
6606 * @arr : map between the two enums
6607 * @len : len of the map
 * @fwk_name : name of the framework parameter to map
6609 *
6610 * RETURN : int32_t type of status
6611 * hal_name -- success
 *              non-zero failure code
6613 *==========================================================================*/
6614template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6615 size_t len, fwkType fwk_name)
6616{
6617 for (size_t i = 0; i < len; i++) {
6618 if (arr[i].fwk_name == fwk_name) {
6619 return arr[i].hal_name;
6620 }
6621 }
6622
6623 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6624 return NAME_NOT_FOUND;
6625}
6626
6627/*===========================================================================
6628 * FUNCTION : lookupProp
6629 *
6630 * DESCRIPTION: lookup a value by its name
6631 *
6632 * PARAMETERS :
6633 * @arr : map between the two enums
6634 * @len : size of the map
6635 * @name : name to be looked up
6636 *
6637 * RETURN : Value if found
6638 * CAM_CDS_MODE_MAX if not found
6639 *==========================================================================*/
6640template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6641 size_t len, const char *name)
6642{
6643 if (name) {
6644 for (size_t i = 0; i < len; i++) {
6645 if (!strcmp(arr[i].desc, name)) {
6646 return arr[i].val;
6647 }
6648 }
6649 }
6650 return CAM_CDS_MODE_MAX;
6651}
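// lookupProp() is used, for example, to translate a CDS-related setprop string such
// as "on"/"off"/"auto" into the matching cam_cds_mode_type_t entry; an unknown or
// NULL name falls through to CAM_CDS_MODE_MAX.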
6652
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata obtained from the HAL into the
 *              camera_metadata_t format expected by the framework
 *
 * PARAMETERS :
 * @metadata : metadata information from callback
 * @pendingRequest: pending request for this metadata
 * @pprocDone: whether internal offline postprocessing is done
 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                       in a batch. Always true for non-batch mode.
 * @enableZsl: pointer to the per-request ZSL enable setting, if available
Thierry Strudel3d639192016-09-09 11:52:26 -07006663 *
6664 * RETURN : camera_metadata_t*
6665 * metadata in a format specified by fwk
6666 *==========================================================================*/
6667camera_metadata_t*
6668QCamera3HardwareInterface::translateFromHalMetadata(
6669 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006670 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006671 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006672 bool lastMetadataInBatch,
6673 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006674{
6675 CameraMetadata camMetadata;
6676 camera_metadata_t *resultMetadata;
6677
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006678 if (!lastMetadataInBatch) {
        /* In batch mode, when this is not the last metadata in the batch, only
         * SENSOR_TIMESTAMP is populated, because the timestamp is needed for the
         * shutter notify calculation. */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006682 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006683 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006684 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006685 }
6686
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006687 if (pendingRequest.jpegMetadata.entryCount())
6688 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006689
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006690 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6691 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6692 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6693 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6694 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006695 if (mBatchSize == 0) {
6696 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006697 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006699
Samuel Ha68ba5172016-12-15 18:41:12 -08006700 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006702 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006703 // DevCamDebug metadata translateFromHalMetadata AF
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6705 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6706 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6707 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6708 }
6709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6710 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6711 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6712 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6715 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6716 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6717 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6718 }
6719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6720 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6721 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6722 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6725 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6726 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6727 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6730 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6731 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6732 *DevCamDebug_af_monitor_pdaf_target_pos;
6733 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6734 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6737 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6738 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6739 *DevCamDebug_af_monitor_pdaf_confidence;
6740 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6741 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6744 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6745 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6746 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6747 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6750 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6752 *DevCamDebug_af_monitor_tof_target_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6754 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6757 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6758 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6759 *DevCamDebug_af_monitor_tof_confidence;
6760 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6761 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6762 }
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6764 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6765 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6766 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6767 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6768 }
6769 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6770 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6771 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6772 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6773 &fwk_DevCamDebug_af_monitor_type_select, 1);
6774 }
6775 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6776 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6777 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6778 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6779 &fwk_DevCamDebug_af_monitor_refocus, 1);
6780 }
6781 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6782 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6783 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6784 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6785 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6786 }
6787 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6788 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6789 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6790 *DevCamDebug_af_search_pdaf_target_pos;
6791 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6792 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6795 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6796 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6797 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6798 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6801 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6802 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6803 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6804 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6805 }
6806 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6807 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6808 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6809 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6810 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6813 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6814 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6815 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6816 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6817 }
6818 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6819 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6820 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6821 *DevCamDebug_af_search_tof_target_pos;
6822 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6823 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6826 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6827 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6828 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6829 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6832 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6833 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6834 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6835 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6838 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6840 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6841 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6844 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6845 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6846 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6847 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6848 }
6849 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6850 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6851 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6852 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6853 &fwk_DevCamDebug_af_search_type_select, 1);
6854 }
6855 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6856 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6857 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6858 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6859 &fwk_DevCamDebug_af_search_next_pos, 1);
6860 }
6861 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6862 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6863 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6864 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6865 &fwk_DevCamDebug_af_search_target_pos, 1);
6866 }
6867 // DevCamDebug metadata translateFromHalMetadata AEC
6868 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6869 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6870 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6871 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6874 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6875 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6876 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6879 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6880 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6881 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6882 }
6883 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6884 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6885 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6886 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6889 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6890 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6891 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6892 }
6893 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6894 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6895 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6896 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6897 }
6898 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6899 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6900 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6901 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6902 }
6903 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6904 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6905 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6906 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6907 }
Samuel Ha34229982017-02-17 13:51:11 -08006908 // DevCamDebug metadata translateFromHalMetadata zzHDR
6909 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6910 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6911 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6912 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6913 }
6914 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6915 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006916 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006917 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6918 }
6919 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6920 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6921 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6922 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6923 }
6924 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6925 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006926 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006927 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6928 }
6929 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6930 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6931 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6932 *DevCamDebug_aec_hdr_sensitivity_ratio;
6933 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6934 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6935 }
6936 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6937 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6938 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6939 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6940 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6941 }
6942 // DevCamDebug metadata translateFromHalMetadata ADRC
6943 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6944 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6945 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6946 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6947 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6948 }
6949 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6950 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6951 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6952 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6953 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6954 }
6955 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6956 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6957 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6958 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6959 }
6960 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6961 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6962 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6963 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6964 }
6965 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6966 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6967 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6968 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6969 }
6970 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6971 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6972 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6973 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6974 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006975 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6976 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6977 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6978 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6979 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6980 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6981 }
6982 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6983 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6984 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6985 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6986 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6987 }
6988 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6989 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6990 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6991 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6992 &fwk_DevCamDebug_aec_subject_motion, 1);
6993 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006994 // DevCamDebug metadata translateFromHalMetadata AWB
6995 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6996 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6997 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6998 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6999 }
7000 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7001 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7002 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7003 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7004 }
7005 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7006 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7007 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7008 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7009 }
7010 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7011 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7012 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7013 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7014 }
7015 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7016 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7017 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7018 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7019 }
7020 }
7021 // atrace_end(ATRACE_TAG_ALWAYS);
7022
Thierry Strudel3d639192016-09-09 11:52:26 -07007023 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7024 int64_t fwk_frame_number = *frame_number;
7025 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7026 }
7027
7028 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7029 int32_t fps_range[2];
7030 fps_range[0] = (int32_t)float_range->min_fps;
7031 fps_range[1] = (int32_t)float_range->max_fps;
7032 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7033 fps_range, 2);
7034 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7035 fps_range[0], fps_range[1]);
7036 }
7037
7038 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7039 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7040 }
7041
7042 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7043 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7044 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7045 *sceneMode);
7046 if (NAME_NOT_FOUND != val) {
7047 uint8_t fwkSceneMode = (uint8_t)val;
7048 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7049 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7050 fwkSceneMode);
7051 }
7052 }
7053
7054 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7055 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7056 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7057 }
7058
7059 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7060 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7061 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7062 }
7063
7064 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7065 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7066 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7067 }
7068
7069 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7070 CAM_INTF_META_EDGE_MODE, metadata) {
7071 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7072 }
7073
7074 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7075 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7076 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7077 }
7078
7079 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7080 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7081 }
7082
7083 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7084 if (0 <= *flashState) {
7085 uint8_t fwk_flashState = (uint8_t) *flashState;
7086 if (!gCamCapability[mCameraId]->flash_available) {
7087 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7088 }
7089 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7090 }
7091 }
7092
7093 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7094 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7095 if (NAME_NOT_FOUND != val) {
7096 uint8_t fwk_flashMode = (uint8_t)val;
7097 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7098 }
7099 }
7100
7101 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7102 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7103 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7104 }
7105
7106 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7107 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7108 }
7109
7110 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7111 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7112 }
7113
7114 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7115 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7116 }
7117
7118 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7119 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7120 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7121 }
7122
7123 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7124 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7125 LOGD("fwk_videoStab = %d", fwk_videoStab);
7126 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7127 } else {
        // Regardless of whether video stabilization is supported, CTS expects the
        // EIS result to be non-NULL, so hardcode the video stabilization result to
        // OFF mode.
7130 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7131 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007133 }
7134
7135 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7136 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7137 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7138 }
7139
7140 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7141 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7142 }
7143
Thierry Strudel3d639192016-09-09 11:52:26 -07007144 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7145 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007146 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007147
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007148 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7149 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007150
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007151 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007152 blackLevelAppliedPattern->cam_black_level[0],
7153 blackLevelAppliedPattern->cam_black_level[1],
7154 blackLevelAppliedPattern->cam_black_level[2],
7155 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007156 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7157 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007158
7159#ifndef USE_HAL_3_3
7160 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need to convert from the internal 14-bit depth to the sensor's 10-bit
        // raw depth space, i.e. divide by 2^(14 - 10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307163 fwk_blackLevelInd[0] /= 16.0;
7164 fwk_blackLevelInd[1] /= 16.0;
7165 fwk_blackLevelInd[2] /= 16.0;
7166 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007167 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7168 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007169#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007170 }
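    // Worked example for the conversion above: an applied dynamic black level of
    // 1024 at the internal 14-bit depth is reported to the framework as
    // 1024 / 16 = 64 in the sensor's native 10-bit raw range.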
7171
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007172#ifndef USE_HAL_3_3
7173 // Fixed whitelevel is used by ISP/Sensor
7174 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7175 &gCamCapability[mCameraId]->white_level, 1);
7176#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007177
7178 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7179 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7180 int32_t scalerCropRegion[4];
7181 scalerCropRegion[0] = hScalerCropRegion->left;
7182 scalerCropRegion[1] = hScalerCropRegion->top;
7183 scalerCropRegion[2] = hScalerCropRegion->width;
7184 scalerCropRegion[3] = hScalerCropRegion->height;
7185
7186 // Adjust crop region from sensor output coordinate system to active
7187 // array coordinate system.
7188 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7189 scalerCropRegion[2], scalerCropRegion[3]);
7190
7191 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7192 }
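    // The crop region reported to the framework is always in active-array
    // coordinates. For example, on a sensor mode that is roughly 2x binned relative
    // to the active array, a crop of (x, y, w, h) in sensor-output coordinates would
    // map to approximately (2x, 2y, 2w, 2h) after mCropRegionMapper.toActiveArray().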
7193
7194 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7195 LOGD("sensorExpTime = %lld", *sensorExpTime);
7196 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7197 }
7198
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007199 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7200 LOGD("expTimeBoost = %f", *expTimeBoost);
7201 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7202 }
7203
    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
    }
7209
7210 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7211 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7212 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7213 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7214 sensorRollingShutterSkew, 1);
7215 }
7216
7217 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7218 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7219 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7220
7221 //calculate the noise profile based on sensitivity
7222 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7223 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7224 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7225 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7226 noise_profile[i] = noise_profile_S;
7227 noise_profile[i+1] = noise_profile_O;
7228 }
7229 LOGD("noise model entry (S, O) is (%f, %f)",
7230 noise_profile_S, noise_profile_O);
7231 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7232 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7233 }
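    // The (S, O) pairs above follow the Camera2 noise model definition, where the
    // noise (standard deviation) of a normalized pixel value x is modeled as
    // N(x) = sqrt(S * x + O); one (S, O) pair is reported per color channel.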
7234
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007235#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007236 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007237 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007238 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007239 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007240 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7241 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7242 }
7243 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244#endif
7245
Thierry Strudel3d639192016-09-09 11:52:26 -07007246 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7247 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7248 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7249 }
7250
7251 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7252 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7253 *faceDetectMode);
7254 if (NAME_NOT_FOUND != val) {
7255 uint8_t fwk_faceDetectMode = (uint8_t)val;
7256 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7257
7258 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7259 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7260 CAM_INTF_META_FACE_DETECTION, metadata) {
7261 uint8_t numFaces = MIN(
7262 faceDetectionInfo->num_faces_detected, MAX_ROI);
7263 int32_t faceIds[MAX_ROI];
7264 uint8_t faceScores[MAX_ROI];
7265 int32_t faceRectangles[MAX_ROI * 4];
7266 int32_t faceLandmarks[MAX_ROI * 6];
7267 size_t j = 0, k = 0;
7268
7269 for (size_t i = 0; i < numFaces; i++) {
7270 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Map the face rectangle from the sensor output coordinate
                        // system to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007273 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7275 rect.width, rect.height);
7276
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007277 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007278
Jason Lee8ce36fa2017-04-19 19:40:37 -07007279 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7280 "bottom-right (%d, %d)",
7281 faceDetectionInfo->frame_id, i,
7282 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7283 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7284
Thierry Strudel3d639192016-09-09 11:52:26 -07007285 j+= 4;
7286 }
7287 if (numFaces <= 0) {
7288 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7289 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7290 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7291 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7292 }
7293
7294 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7295 numFaces);
7296 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7297 faceRectangles, numFaces * 4U);
7298 if (fwk_faceDetectMode ==
7299 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7300 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7301 CAM_INTF_META_FACE_LANDMARK, metadata) {
7302
7303 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007304 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                // Map the landmark coordinates from the sensor output
                                // coordinate system to the active array coordinate system.
7307 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007308 face_landmarks.left_eye_center.x,
7309 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007310 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007311 face_landmarks.right_eye_center.x,
7312 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007313 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007314 face_landmarks.mouth_center.x,
7315 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007316
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007317 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007318
7319 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7320 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7321 faceDetectionInfo->frame_id, i,
7322 faceLandmarks[k + LEFT_EYE_X],
7323 faceLandmarks[k + LEFT_EYE_Y],
7324 faceLandmarks[k + RIGHT_EYE_X],
7325 faceLandmarks[k + RIGHT_EYE_Y],
7326 faceLandmarks[k + MOUTH_X],
7327 faceLandmarks[k + MOUTH_Y]);
7328
Thierry Strudel04e026f2016-10-10 11:27:36 -07007329 k+= TOTAL_LANDMARK_INDICES;
7330 }
7331 } else {
7332 for (size_t i = 0; i < numFaces; i++) {
7333 setInvalidLandmarks(faceLandmarks+k);
7334 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007335 }
7336 }
7337
Jason Lee49619db2017-04-13 12:07:22 -07007338 for (size_t i = 0; i < numFaces; i++) {
7339 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7340
7341 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7342 faceDetectionInfo->frame_id, i, faceIds[i]);
7343 }
7344
Thierry Strudel3d639192016-09-09 11:52:26 -07007345 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7346 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7347 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007348 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007349 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7350 CAM_INTF_META_FACE_BLINK, metadata) {
7351 uint8_t detected[MAX_ROI];
7352 uint8_t degree[MAX_ROI * 2];
7353 for (size_t i = 0; i < numFaces; i++) {
7354 detected[i] = blinks->blink[i].blink_detected;
7355 degree[2 * i] = blinks->blink[i].left_blink;
7356 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007357
Jason Lee49619db2017-04-13 12:07:22 -07007358 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7359 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7360 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7361 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007362 }
7363 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7364 detected, numFaces);
7365 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7366 degree, numFaces * 2);
7367 }
7368 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7369 CAM_INTF_META_FACE_SMILE, metadata) {
7370 uint8_t degree[MAX_ROI];
7371 uint8_t confidence[MAX_ROI];
7372 for (size_t i = 0; i < numFaces; i++) {
7373 degree[i] = smiles->smile[i].smile_degree;
7374 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007375
Jason Lee49619db2017-04-13 12:07:22 -07007376 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7377 "smile_degree=%d, smile_score=%d",
7378 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007379 }
7380 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7381 degree, numFaces);
7382 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7383 confidence, numFaces);
7384 }
7385 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7386 CAM_INTF_META_FACE_GAZE, metadata) {
7387 int8_t angle[MAX_ROI];
7388 int32_t direction[MAX_ROI * 3];
7389 int8_t degree[MAX_ROI * 2];
7390 for (size_t i = 0; i < numFaces; i++) {
7391 angle[i] = gazes->gaze[i].gaze_angle;
7392 direction[3 * i] = gazes->gaze[i].updown_dir;
7393 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7394 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7395 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7396 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007397
7398 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7399 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7400 "left_right_gaze=%d, top_bottom_gaze=%d",
7401 faceDetectionInfo->frame_id, i, angle[i],
7402 direction[3 * i], direction[3 * i + 1],
7403 direction[3 * i + 2],
7404 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007405 }
7406 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7407 (uint8_t *)angle, numFaces);
7408 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7409 direction, numFaces * 3);
7410 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7411 (uint8_t *)degree, numFaces * 2);
7412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007413 }
7414 }
7415 }
7416 }
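    // Note: all face data above (rectangles and landmarks) is converted from the
    // sensor output coordinate system to active-array coordinates via
    // mCropRegionMapper before being published, which is the coordinate space the
    // framework expects for the ANDROID_STATISTICS_FACE_* tags.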
7417
7418 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7419 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007420 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007421 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007422 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007423
Shuzhen Wang14415f52016-11-16 18:26:18 -08007424 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7425 histogramBins = *histBins;
7426 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7427 }
7428
7429 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007430 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7431 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007432 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007433
7434 switch (stats_data->type) {
7435 case CAM_HISTOGRAM_TYPE_BAYER:
7436 switch (stats_data->bayer_stats.data_type) {
7437 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007438 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7439 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007440 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007441 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7442 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007443 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007444 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7445 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007446 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007447 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007448 case CAM_STATS_CHANNEL_R:
7449 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007450 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7451 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007452 }
7453 break;
7454 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007455 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007456 break;
7457 }
7458
Shuzhen Wang14415f52016-11-16 18:26:18 -08007459 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007460 }
7461 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007462 }
7463
7464 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7465 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7466 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7467 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7468 }
7469
7470 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7471 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7472 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7473 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7474 }
7475
7476 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7477 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7478 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7479 CAM_MAX_SHADING_MAP_HEIGHT);
7480 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7481 CAM_MAX_SHADING_MAP_WIDTH);
7482 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7483 lensShadingMap->lens_shading, 4U * map_width * map_height);
7484 }
7485
7486 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7487 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7488 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7489 }
7490
7491 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7492 //Populate CAM_INTF_META_TONEMAP_CURVES
7493 /* ch0 = G, ch 1 = B, ch 2 = R*/
7494 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7495 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7496 tonemap->tonemap_points_cnt,
7497 CAM_MAX_TONEMAP_CURVE_SIZE);
7498 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7499 }
7500
7501 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7502 &tonemap->curves[0].tonemap_points[0][0],
7503 tonemap->tonemap_points_cnt * 2);
7504
7505 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7506 &tonemap->curves[1].tonemap_points[0][0],
7507 tonemap->tonemap_points_cnt * 2);
7508
7509 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7510 &tonemap->curves[2].tonemap_points[0][0],
7511 tonemap->tonemap_points_cnt * 2);
7512 }
7513
7514 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7515 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7516 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7517 CC_GAIN_MAX);
7518 }
7519
7520 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7521 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7522 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7523 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7524 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7525 }
7526
7527 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7528 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7529 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7530 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7531 toneCurve->tonemap_points_cnt,
7532 CAM_MAX_TONEMAP_CURVE_SIZE);
7533 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7534 }
7535 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7536 (float*)toneCurve->curve.tonemap_points,
7537 toneCurve->tonemap_points_cnt * 2);
7538 }
7539
7540 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7541 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7542 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7543 predColorCorrectionGains->gains, 4);
7544 }
7545
7546 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7547 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7548 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7549 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7550 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7551 }
7552
7553 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7554 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7555 }
7556
7557 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7558 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7559 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7560 }
7561
7562 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7563 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7564 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7565 }
7566
7567 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7568 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7569 *effectMode);
7570 if (NAME_NOT_FOUND != val) {
7571 uint8_t fwk_effectMode = (uint8_t)val;
7572 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7573 }
7574 }
7575
7576 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7577 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7578 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7579 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7580 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7581 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7582 }
7583 int32_t fwk_testPatternData[4];
7584 fwk_testPatternData[0] = testPatternData->r;
7585 fwk_testPatternData[3] = testPatternData->b;
7586 switch (gCamCapability[mCameraId]->color_arrangement) {
7587 case CAM_FILTER_ARRANGEMENT_RGGB:
7588 case CAM_FILTER_ARRANGEMENT_GRBG:
7589 fwk_testPatternData[1] = testPatternData->gr;
7590 fwk_testPatternData[2] = testPatternData->gb;
7591 break;
7592 case CAM_FILTER_ARRANGEMENT_GBRG:
7593 case CAM_FILTER_ARRANGEMENT_BGGR:
7594 fwk_testPatternData[2] = testPatternData->gr;
7595 fwk_testPatternData[1] = testPatternData->gb;
7596 break;
7597 default:
7598 LOGE("color arrangement %d is not supported",
7599 gCamCapability[mCameraId]->color_arrangement);
7600 break;
7601 }
7602 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7603 }
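    // The framework defines a fixed channel order for the four solid-color test
    // pattern values, while the HAL reports gr/gb relative to the CFA layout; the
    // switch above therefore swaps the gr/gb samples for GBRG/BGGR arrangements so
    // the framework always sees a consistent channel order.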
7604
7605 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7606 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7607 }
7608
7609 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7610 String8 str((const char *)gps_methods);
7611 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7612 }
7613
7614 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7615 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7616 }
7617
7618 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7619 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7620 }
7621
7622 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7623 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7624 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7625 }
7626
7627 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7628 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7629 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7630 }
7631
7632 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7633 int32_t fwk_thumb_size[2];
7634 fwk_thumb_size[0] = thumb_size->width;
7635 fwk_thumb_size[1] = thumb_size->height;
7636 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7637 }
7638
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007639 // Skip reprocess metadata if there is no input stream.
7640 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7641 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7642 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7643 privateData,
7644 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007646 }
7647
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007648 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007649 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007650 meteringMode, 1);
7651 }
7652
Thierry Strudel54dc9782017-02-15 12:12:10 -08007653 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7654 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7655 LOGD("hdr_scene_data: %d %f\n",
7656 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7657 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7658 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7659 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7660 &isHdr, 1);
7661 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7662 &isHdrConfidence, 1);
7663 }
7664
7665
7666
Thierry Strudel3d639192016-09-09 11:52:26 -07007667 if (metadata->is_tuning_params_valid) {
7668 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7669 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7670 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7671
7672
7673 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7674 sizeof(uint32_t));
7675 data += sizeof(uint32_t);
7676
7677 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7678 sizeof(uint32_t));
7679 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7680 data += sizeof(uint32_t);
7681
7682 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7683 sizeof(uint32_t));
7684 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7685 data += sizeof(uint32_t);
7686
7687 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7688 sizeof(uint32_t));
7689 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7690 data += sizeof(uint32_t);
7691
7692 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7693 sizeof(uint32_t));
7694 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7695 data += sizeof(uint32_t);
7696
7697 metadata->tuning_params.tuning_mod3_data_size = 0;
7698 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7699 sizeof(uint32_t));
7700 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7701 data += sizeof(uint32_t);
7702
7703 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7704 TUNING_SENSOR_DATA_MAX);
7705 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7706 count);
7707 data += count;
7708
7709 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7710 TUNING_VFE_DATA_MAX);
7711 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7712 count);
7713 data += count;
7714
7715 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7716 TUNING_CPP_DATA_MAX);
7717 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7718 count);
7719 data += count;
7720
7721 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7722 TUNING_CAC_DATA_MAX);
7723 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7724 count);
7725 data += count;
7726
7727 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7728 (int32_t *)(void *)tuning_meta_data_blob,
7729 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7730 }
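    // Blob layout, as packed above: six uint32 header fields (data version,
    // sensor/VFE/CPP/CAC section sizes, and a mod3 size forced to 0) followed by
    // the sensor, VFE, CPP and CAC payloads copied from their fixed offsets in
    // tuning_params.data and packed back-to-back, each clamped to its
    // TUNING_*_DATA_MAX limit. dumpMetadataToFile() below writes the same layout.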
7731
7732 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7733 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7734 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7735 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7736 NEUTRAL_COL_POINTS);
7737 }
7738
7739 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7740 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7741 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7742 }
7743
7744 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7745 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7746 // Adjust crop region from sensor output coordinate system to active
7747 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007748 cam_rect_t hAeRect = hAeRegions->rect;
7749 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7750 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007751
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007752 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007753 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7754 REGIONS_TUPLE_COUNT);
7755 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7756 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007757 hAeRect.left, hAeRect.top, hAeRect.width,
7758 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007759 }
7760
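    // AF state for the final result: if an early AF state was already delivered
    // for this request as an urgent partial result (focusStateSent), it is not
    // repeated here. Otherwise a focus state cached by a newer AF trigger
    // (focusStateValid) takes precedence over the state carried in this metadata
    // buffer; see translateCbUrgentMetadataToResultMetadata() below.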
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007761 if (!pendingRequest.focusStateSent) {
7762 if (pendingRequest.focusStateValid) {
7763 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7764 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007765 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007766 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7767 uint8_t fwk_afState = (uint8_t) *afState;
7768 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7769 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7770 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007771 }
7772 }
7773
Thierry Strudel3d639192016-09-09 11:52:26 -07007774 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7775 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7776 }
7777
7778 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7779 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7780 }
7781
7782 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7783 uint8_t fwk_lensState = *lensState;
7784 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7785 }
7786
Thierry Strudel3d639192016-09-09 11:52:26 -07007787 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007788 uint32_t ab_mode = *hal_ab_mode;
7789 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7790 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7791 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007793 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007794 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007795 if (NAME_NOT_FOUND != val) {
7796 uint8_t fwk_ab_mode = (uint8_t)val;
7797 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7798 }
7799 }
7800
7801 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7802 int val = lookupFwkName(SCENE_MODES_MAP,
7803 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7804 if (NAME_NOT_FOUND != val) {
7805 uint8_t fwkBestshotMode = (uint8_t)val;
7806 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7807 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7808 } else {
7809 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7810 }
7811 }
7812
7813 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7814 uint8_t fwk_mode = (uint8_t) *mode;
7815 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7816 }
7817
7818    /* Constant metadata values to be updated */
7819 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7820 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7821
7822 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7823 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7824
7825 int32_t hotPixelMap[2];
7826 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
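    // An empty hot pixel map (count 0) is always reported here, consistent with
    // the OFF hot pixel map mode set above.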
7827
7828 // CDS
7829 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7830 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7831 }
7832
Thierry Strudel04e026f2016-10-10 11:27:36 -07007833 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7834 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007835 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007836 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7837 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7838 } else {
7839 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7840 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007841
7842 if(fwk_hdr != curr_hdr_state) {
7843 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7844 if(fwk_hdr)
7845 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7846 else
7847 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7848 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007849 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7850 }
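    // mCurrFeatureState tracks which backend features are currently active
    // (staggered video HDR here, IR and SW TNR below) so that each on/off
    // transition is logged once via the PROFILE_META_*_TOGGLED messages rather
    // than on every frame.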
7851
Thierry Strudel54dc9782017-02-15 12:12:10 -08007852 //binning correction
7853 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7854 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7855 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7856 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7857 }
7858
Thierry Strudel04e026f2016-10-10 11:27:36 -07007859 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007860 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007861 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7862 int8_t is_ir_on = 0;
7863
7864 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7865 if(is_ir_on != curr_ir_state) {
7866 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7867 if(is_ir_on)
7868 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7869 else
7870 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7871 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007872 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007873 }
7874
Thierry Strudel269c81a2016-10-12 12:13:59 -07007875 // AEC SPEED
7876 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7877 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7878 }
7879
7880 // AWB SPEED
7881 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7882 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7883 }
7884
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 // TNR
7886 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7887 uint8_t tnr_enable = tnr->denoise_enable;
7888 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007889 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7890 int8_t is_tnr_on = 0;
7891
7892 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7893 if(is_tnr_on != curr_tnr_state) {
7894 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7895 if(is_tnr_on)
7896 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7897 else
7898 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007900
7901 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7902 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7903 }
7904
7905 // Reprocess crop data
7906 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7907 uint8_t cnt = crop_data->num_of_streams;
7908 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7909 // mm-qcamera-daemon only posts crop_data for streams
7910            // not linked to pproc. So the absence of valid crop
7911            // metadata is not necessarily an error case.
7912 LOGD("No valid crop metadata entries");
7913 } else {
7914 uint32_t reproc_stream_id;
7915 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7916 LOGD("No reprocessible stream found, ignore crop data");
7917 } else {
7918 int rc = NO_ERROR;
7919 Vector<int32_t> roi_map;
7920 int32_t *crop = new int32_t[cnt*4];
7921 if (NULL == crop) {
7922 rc = NO_MEMORY;
7923 }
7924 if (NO_ERROR == rc) {
7925 int32_t streams_found = 0;
7926 for (size_t i = 0; i < cnt; i++) {
7927 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7928 if (pprocDone) {
7929 // HAL already does internal reprocessing,
7930 // either via reprocessing before JPEG encoding,
7931 // or offline postprocessing for pproc bypass case.
7932 crop[0] = 0;
7933 crop[1] = 0;
7934 crop[2] = mInputStreamInfo.dim.width;
7935 crop[3] = mInputStreamInfo.dim.height;
7936 } else {
7937 crop[0] = crop_data->crop_info[i].crop.left;
7938 crop[1] = crop_data->crop_info[i].crop.top;
7939 crop[2] = crop_data->crop_info[i].crop.width;
7940 crop[3] = crop_data->crop_info[i].crop.height;
7941 }
7942 roi_map.add(crop_data->crop_info[i].roi_map.left);
7943 roi_map.add(crop_data->crop_info[i].roi_map.top);
7944 roi_map.add(crop_data->crop_info[i].roi_map.width);
7945 roi_map.add(crop_data->crop_info[i].roi_map.height);
7946 streams_found++;
7947 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7948 crop[0], crop[1], crop[2], crop[3]);
7949 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7950 crop_data->crop_info[i].roi_map.left,
7951 crop_data->crop_info[i].roi_map.top,
7952 crop_data->crop_info[i].roi_map.width,
7953 crop_data->crop_info[i].roi_map.height);
7954 break;
7955
7956 }
7957 }
7958 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7959 &streams_found, 1);
7960 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7961 crop, (size_t)(streams_found * 4));
7962 if (roi_map.array()) {
7963 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7964 roi_map.array(), roi_map.size());
7965 }
7966 }
7967 if (crop) {
7968 delete [] crop;
7969 }
7970 }
7971 }
7972 }
7973
7974 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7975        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7976        // non-NULL, so hardcode the CAC result to OFF mode.
7977 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7978 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7979 } else {
7980 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7981 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7982 *cacMode);
7983 if (NAME_NOT_FOUND != val) {
7984 uint8_t resultCacMode = (uint8_t)val;
7985                // check whether the CAC result from the CB equals the framework-set CAC mode;
7986                // if not, report the CAC mode that came in the corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007987 if (pendingRequest.fwkCacMode != resultCacMode) {
7988 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007989 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007990 //Check if CAC is disabled by property
7991 if (m_cacModeDisabled) {
7992 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7993 }
7994
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007995 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007996 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7997 } else {
7998 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7999 }
8000 }
8001 }
8002
8003 // Post blob of cam_cds_data through vendor tag.
8004 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8005 uint8_t cnt = cdsInfo->num_of_streams;
8006 cam_cds_data_t cdsDataOverride;
8007 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8008 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8009 cdsDataOverride.num_of_streams = 1;
8010 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8011 uint32_t reproc_stream_id;
8012 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8013 LOGD("No reprocessible stream found, ignore cds data");
8014 } else {
8015 for (size_t i = 0; i < cnt; i++) {
8016 if (cdsInfo->cds_info[i].stream_id ==
8017 reproc_stream_id) {
8018 cdsDataOverride.cds_info[0].cds_enable =
8019 cdsInfo->cds_info[i].cds_enable;
8020 break;
8021 }
8022 }
8023 }
8024 } else {
8025 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8026 }
8027 camMetadata.update(QCAMERA3_CDS_INFO,
8028 (uint8_t *)&cdsDataOverride,
8029 sizeof(cam_cds_data_t));
8030 }
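    // Only the CDS enable flag of the reprocessible stream is forwarded:
    // cdsDataOverride is collapsed to a single-stream entry so the vendor tag
    // carries one value for the reprocess path instead of the per-stream list.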
8031
8032 // Ldaf calibration data
8033 if (!mLdafCalibExist) {
8034 IF_META_AVAILABLE(uint32_t, ldafCalib,
8035 CAM_INTF_META_LDAF_EXIF, metadata) {
8036 mLdafCalibExist = true;
8037 mLdafCalib[0] = ldafCalib[0];
8038 mLdafCalib[1] = ldafCalib[1];
8039 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8040 ldafCalib[0], ldafCalib[1]);
8041 }
8042 }
8043
Thierry Strudel54dc9782017-02-15 12:12:10 -08008044 // EXIF debug data through vendor tag
8045 /*
8046 * Mobicat Mask can assume 3 values:
8047 * 1 refers to Mobicat data,
8048     * 2 refers to Stats Debug and Exif Debug Data,
8049     * 3 refers to Mobicat and Stats Debug Data.
8050 * We want to make sure that we are sending Exif debug data
8051 * only when Mobicat Mask is 2.
8052 */
8053 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8054 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8055 (uint8_t *)(void *)mExifParams.debug_params,
8056 sizeof(mm_jpeg_debug_exif_params_t));
8057 }
8058
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008059 // Reprocess and DDM debug data through vendor tag
8060 cam_reprocess_info_t repro_info;
8061 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008062 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8063 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008064 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008065 }
8066 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8067 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008068 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008069 }
8070 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8071 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008072 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008073 }
8074 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8075 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008076 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
8078 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8079 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008080 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008081 }
8082 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008083 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 }
8085 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8086 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008087 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008088 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008089 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8090 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8091 }
8092 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8093 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8094 }
8095 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8096 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
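    // repro_info aggregates the snapshot-time sensor/CAMIF/ISP/CPP crop windows,
    // focal length ratio, pipeline flip, rotation, AF ROI and dynamic feature
    // mask so the offline reprocess/DDM path can consume them from a single
    // vendor tag blob.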
Thierry Strudel3d639192016-09-09 11:52:26 -07008097
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008098 // INSTANT AEC MODE
8099 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8100 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8101 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8102 }
8103
Shuzhen Wange763e802016-03-31 10:24:29 -07008104 // AF scene change
8105 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8106 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8107 }
8108
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008109 // Enable ZSL
8110 if (enableZsl != nullptr) {
8111 uint8_t value = *enableZsl ?
8112 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8113 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8114 }
8115
Xu Han821ea9c2017-05-23 09:00:40 -07008116 // OIS Data
8117 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8118 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8119 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8120 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8121 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8122 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8123 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8124 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8125 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8126 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8127 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008128 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8129 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8130 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8131 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008132 }
8133
Thierry Strudel3d639192016-09-09 11:52:26 -07008134 resultMetadata = camMetadata.release();
8135 return resultMetadata;
8136}
8137
8138/*===========================================================================
8139 * FUNCTION : saveExifParams
8140 *
8141 * DESCRIPTION: save the 3A/stats EXIF debug parameters from the metadata callback into mExifParams
8142 *
8143 * PARAMETERS :
8144 * @metadata : metadata information from callback
8145 *
8146 * RETURN : none
8147 *
8148 *==========================================================================*/
8149void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8150{
8151 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8152 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8153 if (mExifParams.debug_params) {
8154 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8155 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8156 }
8157 }
8158 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8159 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8160 if (mExifParams.debug_params) {
8161 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8162 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8163 }
8164 }
8165 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8166 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8167 if (mExifParams.debug_params) {
8168 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8169 mExifParams.debug_params->af_debug_params_valid = TRUE;
8170 }
8171 }
8172 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8173 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8174 if (mExifParams.debug_params) {
8175 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8176 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8177 }
8178 }
8179 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8180 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8181 if (mExifParams.debug_params) {
8182 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8183 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8184 }
8185 }
8186 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8187 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8188 if (mExifParams.debug_params) {
8189 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8190 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8191 }
8192 }
8193 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8194 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8195 if (mExifParams.debug_params) {
8196 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8197 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8198 }
8199 }
8200 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8201 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8202 if (mExifParams.debug_params) {
8203 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8204 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8205 }
8206 }
8207}
8208
8209/*===========================================================================
8210 * FUNCTION : get3AExifParams
8211 *
8212 * DESCRIPTION: return the cached EXIF parameters, including the debug data saved by saveExifParams()
8213 *
8214 * PARAMETERS : none
8215 *
8216 *
8217 * RETURN : mm_jpeg_exif_params_t
8218 *
8219 *==========================================================================*/
8220mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8221{
8222 return mExifParams;
8223}
8224
8225/*===========================================================================
8226 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8227 *
8228 * DESCRIPTION: translate urgent (partial result) metadata from the backend into framework result metadata
8229 *
8230 * PARAMETERS :
8231 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008232 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8233 * urgent metadata in a batch. Always true for
8234 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008235 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008236 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8237 * i.e. even though it doesn't map to a valid partial
8238 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008239 * RETURN : camera_metadata_t*
8240 * metadata in a format specified by fwk
8241 *==========================================================================*/
8242camera_metadata_t*
8243QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008244 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008245 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008246{
8247 CameraMetadata camMetadata;
8248 camera_metadata_t *resultMetadata;
8249
Shuzhen Wang485e2442017-08-02 12:21:08 -07008250 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008251 /* In batch mode, use empty metadata if this is not the last in batch
8252 */
8253 resultMetadata = allocate_camera_metadata(0, 0);
8254 return resultMetadata;
8255 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008256
8257 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8258 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8259 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8260 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8261 }
8262
8263 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8264 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8265 &aecTrigger->trigger, 1);
8266 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8267 &aecTrigger->trigger_id, 1);
8268 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8269 aecTrigger->trigger);
8270 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8271 aecTrigger->trigger_id);
8272 }
8273
8274 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8275 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8276 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8277 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8278 }
8279
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008280 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8281 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8282 if (NAME_NOT_FOUND != val) {
8283 uint8_t fwkAfMode = (uint8_t)val;
8284 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8285 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8286 } else {
8287 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8288 val);
8289 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008290 }
8291
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008292 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8293 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8294 af_trigger->trigger);
8295 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8296 af_trigger->trigger_id);
8297
8298 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8299 mAfTrigger = *af_trigger;
8300 uint32_t fwk_AfState = (uint32_t) *afState;
8301
8302 // If this is the result for a new trigger, check if there is new early
8303 // af state. If there is, use the last af state for all results
8304 // preceding current partial frame number.
8305 for (auto & pendingRequest : mPendingRequestsList) {
8306 if (pendingRequest.frame_number < frame_number) {
8307 pendingRequest.focusStateValid = true;
8308 pendingRequest.focusState = fwk_AfState;
8309 } else if (pendingRequest.frame_number == frame_number) {
8310 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8311 // Check if early AF state for trigger exists. If yes, send AF state as
8312 // partial result for better latency.
8313 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8314 pendingRequest.focusStateSent = true;
8315 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8316 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8317 frame_number, fwkEarlyAfState);
8318 }
8319 }
8320 }
8321 }
8322 }
8323 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8324 &mAfTrigger.trigger, 1);
8325 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8326
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008327 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8328 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008329 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008330 int32_t afRegions[REGIONS_TUPLE_COUNT];
8331 // Adjust crop region from sensor output coordinate system to active
8332 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008333 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8334 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008335
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008336 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008337 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8338 REGIONS_TUPLE_COUNT);
8339 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8340 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008341 hAfRect.left, hAfRect.top, hAfRect.width,
8342 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008343 }
8344
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008345 // AF region confidence
8346 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8347 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8348 }
8349
Thierry Strudel3d639192016-09-09 11:52:26 -07008350 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8351 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8352 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8353 if (NAME_NOT_FOUND != val) {
8354 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8355 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8356 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8357 } else {
8358 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8359 }
8360 }
8361
8362 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8363 uint32_t aeMode = CAM_AE_MODE_MAX;
8364 int32_t flashMode = CAM_FLASH_MODE_MAX;
8365 int32_t redeye = -1;
8366 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8367 aeMode = *pAeMode;
8368 }
8369 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8370 flashMode = *pFlashMode;
8371 }
8372 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8373 redeye = *pRedeye;
8374 }
8375
8376 if (1 == redeye) {
8377 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8378 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8379 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8380 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8381 flashMode);
8382 if (NAME_NOT_FOUND != val) {
8383 fwk_aeMode = (uint8_t)val;
8384 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8385 } else {
8386 LOGE("Unsupported flash mode %d", flashMode);
8387 }
8388 } else if (aeMode == CAM_AE_MODE_ON) {
8389 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8390 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8391 } else if (aeMode == CAM_AE_MODE_OFF) {
8392 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8393 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008394 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8395 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8396 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008397 } else {
8398 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8399 "flashMode:%d, aeMode:%u!!!",
8400 redeye, flashMode, aeMode);
8401 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008402 if (mInstantAEC) {
8403        // Increment the frame index count until the bound is reached for instant AEC.
8404 mInstantAecFrameIdxCount++;
8405 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8406 CAM_INTF_META_AEC_INFO, metadata) {
8407 LOGH("ae_params->settled = %d",ae_params->settled);
8408            // If AEC has settled, or the number of frames has reached the bound value,
8409            // instant AEC should be reset.
8410 if (ae_params->settled ||
8411 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8412 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8413 mInstantAEC = false;
8414 mResetInstantAEC = true;
8415 mInstantAecFrameIdxCount = 0;
8416 }
8417 }
8418 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008419 resultMetadata = camMetadata.release();
8420 return resultMetadata;
8421}
8422
8423/*===========================================================================
8424 * FUNCTION : dumpMetadataToFile
8425 *
8426 * DESCRIPTION: Dumps tuning metadata to file system
8427 *
8428 * PARAMETERS :
8429 * @meta : tuning metadata
8430 * @dumpFrameCount : current dump frame count
8431 * @enabled : Enable mask
8432 *
8433 *==========================================================================*/
8434void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8435 uint32_t &dumpFrameCount,
8436 bool enabled,
8437 const char *type,
8438 uint32_t frameNumber)
8439{
8440 //Some sanity checks
8441 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8442 LOGE("Tuning sensor data size bigger than expected %d: %d",
8443 meta.tuning_sensor_data_size,
8444 TUNING_SENSOR_DATA_MAX);
8445 return;
8446 }
8447
8448 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8449 LOGE("Tuning VFE data size bigger than expected %d: %d",
8450 meta.tuning_vfe_data_size,
8451 TUNING_VFE_DATA_MAX);
8452 return;
8453 }
8454
8455 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8456 LOGE("Tuning CPP data size bigger than expected %d: %d",
8457 meta.tuning_cpp_data_size,
8458 TUNING_CPP_DATA_MAX);
8459 return;
8460 }
8461
8462 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8463 LOGE("Tuning CAC data size bigger than expected %d: %d",
8464 meta.tuning_cac_data_size,
8465 TUNING_CAC_DATA_MAX);
8466 return;
8467 }
8468 //
8469
8470 if(enabled){
8471 char timeBuf[FILENAME_MAX];
8472 char buf[FILENAME_MAX];
8473 memset(buf, 0, sizeof(buf));
8474 memset(timeBuf, 0, sizeof(timeBuf));
8475 time_t current_time;
8476 struct tm * timeinfo;
8477 time (&current_time);
8478 timeinfo = localtime (&current_time);
8479 if (timeinfo != NULL) {
8480 strftime (timeBuf, sizeof(timeBuf),
8481 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8482 }
8483 String8 filePath(timeBuf);
8484 snprintf(buf,
8485 sizeof(buf),
8486 "%dm_%s_%d.bin",
8487 dumpFrameCount,
8488 type,
8489 frameNumber);
8490 filePath.append(buf);
8491 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8492 if (file_fd >= 0) {
8493 ssize_t written_len = 0;
8494 meta.tuning_data_version = TUNING_DATA_VERSION;
8495 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8496 written_len += write(file_fd, data, sizeof(uint32_t));
8497 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8498 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8499 written_len += write(file_fd, data, sizeof(uint32_t));
8500 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8501 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8502 written_len += write(file_fd, data, sizeof(uint32_t));
8503 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8504 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8505 written_len += write(file_fd, data, sizeof(uint32_t));
8506 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8507 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8508 written_len += write(file_fd, data, sizeof(uint32_t));
8509 meta.tuning_mod3_data_size = 0;
8510 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8511 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8512 written_len += write(file_fd, data, sizeof(uint32_t));
8513 size_t total_size = meta.tuning_sensor_data_size;
8514 data = (void *)((uint8_t *)&meta.data);
8515 written_len += write(file_fd, data, total_size);
8516 total_size = meta.tuning_vfe_data_size;
8517 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8518 written_len += write(file_fd, data, total_size);
8519 total_size = meta.tuning_cpp_data_size;
8520 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8521 written_len += write(file_fd, data, total_size);
8522 total_size = meta.tuning_cac_data_size;
8523 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8524 written_len += write(file_fd, data, total_size);
8525 close(file_fd);
8526 }else {
8527 LOGE("fail to open file for metadata dumping");
8528 }
8529 }
8530}
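// The dump uses the same layout as QCAMERA3_TUNING_META_DATA_BLOB: a six-uint32
// header (version plus section sizes, mod3 forced to 0) followed by the
// sensor/VFE/CPP/CAC payloads. Files are written under QCAMERA_DUMP_FRM_LOCATION
// as "<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin".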
8531
8532/*===========================================================================
8533 * FUNCTION : cleanAndSortStreamInfo
8534 *
8535 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8536 *              and sort them such that raw streams are at the end of the list.
8537 *              This is a workaround for a camera daemon constraint.
8538 *
8539 * PARAMETERS : None
8540 *
8541 *==========================================================================*/
8542void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8543{
8544 List<stream_info_t *> newStreamInfo;
8545
8546 /*clean up invalid streams*/
8547 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8548 it != mStreamInfo.end();) {
8549 if(((*it)->status) == INVALID){
8550 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8551 delete channel;
8552 free(*it);
8553 it = mStreamInfo.erase(it);
8554 } else {
8555 it++;
8556 }
8557 }
8558
8559 // Move preview/video/callback/snapshot streams into newList
8560 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8561 it != mStreamInfo.end();) {
8562 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8563 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8564 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8565 newStreamInfo.push_back(*it);
8566 it = mStreamInfo.erase(it);
8567 } else
8568 it++;
8569 }
8570 // Move raw streams into newList
8571 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8572 it != mStreamInfo.end();) {
8573 newStreamInfo.push_back(*it);
8574 it = mStreamInfo.erase(it);
8575 }
8576
8577 mStreamInfo = newStreamInfo;
8578}
8579
8580/*===========================================================================
8581 * FUNCTION : extractJpegMetadata
8582 *
8583 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8584 * JPEG metadata is cached in HAL, and return as part of capture
8585 *              JPEG metadata is cached in HAL, and returned as part of the capture
8586 *
8587 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8588 * @request: capture request
8589 *
8590 *==========================================================================*/
8591void QCamera3HardwareInterface::extractJpegMetadata(
8592 CameraMetadata& jpegMetadata,
8593 const camera3_capture_request_t *request)
8594{
8595 CameraMetadata frame_settings;
8596 frame_settings = request->settings;
8597
8598 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8599 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8600 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8601 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8602
8603 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8604 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8605 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8606 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8607
8608 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8609 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8610 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8611 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8612
8613 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8614 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8615 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8616 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8617
8618 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8619 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8620 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8621 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8622
8623 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8624 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8625 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8626 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8627
8628 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8629 int32_t thumbnail_size[2];
8630 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8631 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8632 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8633 int32_t orientation =
8634 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008635 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008636 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8637 int32_t temp;
8638 temp = thumbnail_size[0];
8639 thumbnail_size[0] = thumbnail_size[1];
8640 thumbnail_size[1] = temp;
8641 }
8642 }
8643 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8644 thumbnail_size,
8645 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8646 }
8647
8648}
8649
8650/*===========================================================================
8651 * FUNCTION : convertToRegions
8652 *
8653 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8654 *
8655 * PARAMETERS :
8656 * @rect : cam_rect_t struct to convert
8657 * @region : int32_t destination array
8658 * @weight : if we are converting from cam_area_t, weight is valid
8659 * else weight = -1
8660 *
8661 *==========================================================================*/
8662void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8663 int32_t *region, int weight)
8664{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008665 region[FACE_LEFT] = rect.left;
8666 region[FACE_TOP] = rect.top;
8667 region[FACE_RIGHT] = rect.left + rect.width;
8668 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008669 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008670 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008671 }
8672}
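// Illustrative use of convertToRegions() (hypothetical values): a HAL rect
// {left=100, top=200, width=300, height=400} with weight 1 becomes the framework
// tuple [100, 200, 400, 600, 1], i.e. [xmin, ymin, xmax, ymax, weight].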
8673
8674/*===========================================================================
8675 * FUNCTION : convertFromRegions
8676 *
8677 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8678 *
8679 * PARAMETERS :
8680 *   @roi : cam_area_t destination struct
8681 *   @frame_settings : framework metadata containing the region array
8682 *   @tag : metadata tag identifying the region entry to convert
8683 *
8684 *
8685 *==========================================================================*/
8686void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008687 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008688{
Thierry Strudel3d639192016-09-09 11:52:26 -07008689 int32_t x_min = frame_settings.find(tag).data.i32[0];
8690 int32_t y_min = frame_settings.find(tag).data.i32[1];
8691 int32_t x_max = frame_settings.find(tag).data.i32[2];
8692 int32_t y_max = frame_settings.find(tag).data.i32[3];
8693 roi.weight = frame_settings.find(tag).data.i32[4];
8694 roi.rect.left = x_min;
8695 roi.rect.top = y_min;
8696 roi.rect.width = x_max - x_min;
8697 roi.rect.height = y_max - y_min;
8698}
8699
8700/*===========================================================================
8701 * FUNCTION : resetIfNeededROI
8702 *
8703 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8704 * crop region
8705 *
8706 * PARAMETERS :
8707 * @roi : cam_area_t struct to resize
8708 * @scalerCropRegion : cam_crop_region_t region to compare against
8709 *
8710 *
8711 *==========================================================================*/
8712bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8713 const cam_crop_region_t* scalerCropRegion)
8714{
8715 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8716 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8717 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8718 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8719
8720    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8721     * Without this check, the calculations below that validate whether the ROI
8722     * lies inside the scaler crop region would fail, the ROI would not be
8723     * reset, and the algorithm would keep using a stale ROI window.
8724     */
8725 if (roi->weight == 0) {
8726 return true;
8727 }
8728
8729 if ((roi_x_max < scalerCropRegion->left) ||
8730        // right edge of roi window is left of scaler crop's left edge
8731 (roi_y_max < scalerCropRegion->top) ||
8732        // bottom edge of roi window is above scaler crop's top edge
8733 (roi->rect.left > crop_x_max) ||
8734        // left edge of roi window is beyond (right of) scaler crop's right edge
8735 (roi->rect.top > crop_y_max)){
8736        // top edge of roi window is beyond (below) scaler crop's bottom edge
8737 return false;
8738 }
8739 if (roi->rect.left < scalerCropRegion->left) {
8740 roi->rect.left = scalerCropRegion->left;
8741 }
8742 if (roi->rect.top < scalerCropRegion->top) {
8743 roi->rect.top = scalerCropRegion->top;
8744 }
8745 if (roi_x_max > crop_x_max) {
8746 roi_x_max = crop_x_max;
8747 }
8748 if (roi_y_max > crop_y_max) {
8749 roi_y_max = crop_y_max;
8750 }
8751 roi->rect.width = roi_x_max - roi->rect.left;
8752 roi->rect.height = roi_y_max - roi->rect.top;
8753 return true;
8754}
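// Worked example for resetIfNeededROI() (hypothetical values): with a scaler
// crop region of {0, 0, 4000, 3000} and an ROI rect of {3900, 2900, 400, 400},
// the ROI overlaps the crop region, so it is clamped to {3900, 2900, 100, 100}
// and the function returns true. An ROI with weight 0 returns true immediately,
// signalling a disabled ROI rather than a stale one.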
8755
8756/*===========================================================================
8757 * FUNCTION : convertLandmarks
8758 *
8759 * DESCRIPTION: helper method to extract the landmarks from face detection info
8760 *
8761 * PARAMETERS :
8762 * @landmark_data : input landmark data to be converted
8763 * @landmarks : int32_t destination array
8764 *
8765 *
8766 *==========================================================================*/
8767void QCamera3HardwareInterface::convertLandmarks(
8768 cam_face_landmarks_info_t landmark_data,
8769 int32_t *landmarks)
8770{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008771 if (landmark_data.is_left_eye_valid) {
8772 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8773 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8774 } else {
8775 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8776 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8777 }
8778
8779 if (landmark_data.is_right_eye_valid) {
8780 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8781 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8782 } else {
8783 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8784 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8785 }
8786
8787 if (landmark_data.is_mouth_valid) {
8788 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8789 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8790 } else {
8791 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8792 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8793 }
8794}
8795
8796/*===========================================================================
8797 * FUNCTION : setInvalidLandmarks
8798 *
8799 * DESCRIPTION: helper method to set invalid landmarks
8800 *
8801 * PARAMETERS :
8802 * @landmarks : int32_t destination array
8803 *
8804 *
8805 *==========================================================================*/
8806void QCamera3HardwareInterface::setInvalidLandmarks(
8807 int32_t *landmarks)
8808{
8809 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8810 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8811 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8812 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8813 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8814 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008815}
8816
8817#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008818
8819/*===========================================================================
8820 * FUNCTION : getCapabilities
8821 *
8822 * DESCRIPTION: query camera capability from back-end
8823 *
8824 * PARAMETERS :
8825 * @ops : mm-interface ops structure
8826 * @cam_handle : camera handle for which we need capability
8827 *
8828 * RETURN : ptr type of capability structure
8829 * capability for success
8830 * NULL for failure
8831 *==========================================================================*/
8832cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8833 uint32_t cam_handle)
8834{
8835 int rc = NO_ERROR;
8836 QCamera3HeapMemory *capabilityHeap = NULL;
8837 cam_capability_t *cap_ptr = NULL;
8838
8839 if (ops == NULL) {
8840 LOGE("Invalid arguments");
8841 return NULL;
8842 }
8843
8844 capabilityHeap = new QCamera3HeapMemory(1);
8845 if (capabilityHeap == NULL) {
8846 LOGE("creation of capabilityHeap failed");
8847 return NULL;
8848 }
8849
8850 /* Allocate memory for capability buffer */
8851 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8852 if(rc != OK) {
8853        LOGE("No memory for capability");
8854 goto allocate_failed;
8855 }
8856
8857 /* Map memory for capability buffer */
8858 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8859
8860 rc = ops->map_buf(cam_handle,
8861 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8862 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8863 if(rc < 0) {
8864 LOGE("failed to map capability buffer");
8865 rc = FAILED_TRANSACTION;
8866 goto map_failed;
8867 }
8868
8869 /* Query Capability */
8870 rc = ops->query_capability(cam_handle);
8871 if(rc < 0) {
8872 LOGE("failed to query capability");
8873 rc = FAILED_TRANSACTION;
8874 goto query_failed;
8875 }
8876
8877 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8878 if (cap_ptr == NULL) {
8879 LOGE("out of memory");
8880 rc = NO_MEMORY;
8881 goto query_failed;
8882 }
8883
8884 memset(cap_ptr, 0, sizeof(cam_capability_t));
8885 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8886
8887 int index;
8888 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8889 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8890 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8891 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8892 }
8893
8894query_failed:
8895 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8896map_failed:
8897 capabilityHeap->deallocate();
8898allocate_failed:
8899 delete capabilityHeap;
8900
8901 if (rc != NO_ERROR) {
8902 return NULL;
8903 } else {
8904 return cap_ptr;
8905 }
8906}
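// getCapabilities() maps a heap-backed cam_capability_t buffer to the backend,
// issues the capability query, then copies the result into a malloc'd struct
// owned by the HAL before unmapping. initCapabilities() below invokes it once
// for the main camera handle and, on dual-camera configurations, again for the
// aux camera handle.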
8907
Thierry Strudel3d639192016-09-09 11:52:26 -07008908/*===========================================================================
8909 * FUNCTION : initCapabilities
8910 *
8911 * DESCRIPTION: initialize camera capabilities in static data struct
8912 *
8913 * PARAMETERS :
8914 * @cameraId : camera Id
8915 *
8916 * RETURN : int32_t type of status
8917 * NO_ERROR -- success
8918 *              non-zero failure code
8919 *==========================================================================*/
8920int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8921{
8922 int rc = 0;
8923 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008924 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008925
8926 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8927 if (rc) {
8928 LOGE("camera_open failed. rc = %d", rc);
8929 goto open_failed;
8930 }
8931 if (!cameraHandle) {
8932 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8933 goto open_failed;
8934 }
8935
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008936 handle = get_main_camera_handle(cameraHandle->camera_handle);
8937 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8938 if (gCamCapability[cameraId] == NULL) {
8939 rc = FAILED_TRANSACTION;
8940 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008941 }
8942
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008943 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008944 if (is_dual_camera_by_idx(cameraId)) {
8945 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8946 gCamCapability[cameraId]->aux_cam_cap =
8947 getCapabilities(cameraHandle->ops, handle);
8948 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8949 rc = FAILED_TRANSACTION;
8950 free(gCamCapability[cameraId]);
8951 goto failed_op;
8952 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008953
8954 // Copy the main camera capability to main_cam_cap struct
8955 gCamCapability[cameraId]->main_cam_cap =
8956 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8957 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8958 LOGE("out of memory");
8959 rc = NO_MEMORY;
8960 goto failed_op;
8961 }
8962 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8963 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008964 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008965failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008966 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8967 cameraHandle = NULL;
8968open_failed:
8969 return rc;
8970}
8971
8972/*==========================================================================
8973 * FUNCTION   : get3AVersion
8974 *
8975 * DESCRIPTION: get the Q3A S/W version
8976 *
8977 * PARAMETERS :
8978 * @sw_version: Reference of Q3A structure which will hold version info upon
8979 * return
8980 *
8981 * RETURN : None
8982 *
8983 *==========================================================================*/
8984void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8985{
8986 if(gCamCapability[mCameraId])
8987 sw_version = gCamCapability[mCameraId]->q3a_version;
8988 else
8989 LOGE("Capability structure NULL!");
8990}
8991
8992
8993/*===========================================================================
8994 * FUNCTION : initParameters
8995 *
8996 * DESCRIPTION: initialize camera parameters
8997 *
8998 * PARAMETERS :
8999 *
9000 * RETURN : int32_t type of status
9001 * NO_ERROR -- success
9002 *              non-zero failure code
9003 *==========================================================================*/
9004int QCamera3HardwareInterface::initParameters()
9005{
9006 int rc = 0;
9007
9008 //Allocate Set Param Buffer
9009 mParamHeap = new QCamera3HeapMemory(1);
9010 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9011 if(rc != OK) {
9012 rc = NO_MEMORY;
9013 LOGE("Failed to allocate SETPARM Heap memory");
9014 delete mParamHeap;
9015 mParamHeap = NULL;
9016 return rc;
9017 }
9018
9019 //Map memory for parameters buffer
9020 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9021 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9022 mParamHeap->getFd(0),
9023 sizeof(metadata_buffer_t),
9024 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9025 if(rc < 0) {
9026 LOGE("failed to map SETPARM buffer");
9027 rc = FAILED_TRANSACTION;
9028 mParamHeap->deallocate();
9029 delete mParamHeap;
9030 mParamHeap = NULL;
9031 return rc;
9032 }
9033
9034 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9035
9036 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9037 return rc;
9038}
9039
9040/*===========================================================================
9041 * FUNCTION : deinitParameters
9042 *
9043 * DESCRIPTION: de-initialize camera parameters
9044 *
9045 * PARAMETERS :
9046 *
9047 * RETURN : NONE
9048 *==========================================================================*/
9049void QCamera3HardwareInterface::deinitParameters()
9050{
9051 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9052 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9053
9054 mParamHeap->deallocate();
9055 delete mParamHeap;
9056 mParamHeap = NULL;
9057
9058 mParameters = NULL;
9059
9060 free(mPrevParameters);
9061 mPrevParameters = NULL;
9062}
9063
9064/*===========================================================================
9065 * FUNCTION : calcMaxJpegSize
9066 *
9067 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9068 *
9069 * PARAMETERS :
9070 *   @camera_id : camera Id
9071 * RETURN : max_jpeg_size
9072 *==========================================================================*/
9073size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9074{
9075 size_t max_jpeg_size = 0;
9076 size_t temp_width, temp_height;
9077 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9078 MAX_SIZES_CNT);
9079 for (size_t i = 0; i < count; i++) {
9080 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9081 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9082 if (temp_width * temp_height > max_jpeg_size ) {
9083 max_jpeg_size = temp_width * temp_height;
9084 }
9085 }
9086 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9087 return max_jpeg_size;
9088}
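/*
 * Worked example (hypothetical sensor, for illustration only): for a largest
 * picture size of 4000x3000, the estimate is
 *   4000 * 3000 * 3/2 + sizeof(camera3_jpeg_blob_t) = 18000000 bytes plus the blob header,
 * i.e. roughly 18 MB reserved for the worst-case JPEG plus the trailing blob struct.
 */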
9089
9090/*===========================================================================
9091 * FUNCTION : getMaxRawSize
9092 *
9093 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9094 *
9095 * PARAMETERS :
9096 *   @camera_id : camera Id
9097 * RETURN : Largest supported Raw Dimension
9098 *==========================================================================*/
9099cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9100{
9101 int max_width = 0;
9102 cam_dimension_t maxRawSize;
9103
9104 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9105 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9106 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9107 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9108 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9109 }
9110 }
9111 return maxRawSize;
9112}
9113
9114
9115/*===========================================================================
9116 * FUNCTION : calcMaxJpegDim
9117 *
9118 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9119 *
9120 * PARAMETERS :
9121 *
9122 * RETURN : max_jpeg_dim
9123 *==========================================================================*/
9124cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9125{
9126 cam_dimension_t max_jpeg_dim;
9127 cam_dimension_t curr_jpeg_dim;
9128 max_jpeg_dim.width = 0;
9129 max_jpeg_dim.height = 0;
9130 curr_jpeg_dim.width = 0;
9131 curr_jpeg_dim.height = 0;
9132 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9133 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9134 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9135 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9136 max_jpeg_dim.width * max_jpeg_dim.height ) {
9137 max_jpeg_dim.width = curr_jpeg_dim.width;
9138 max_jpeg_dim.height = curr_jpeg_dim.height;
9139 }
9140 }
9141 return max_jpeg_dim;
9142}
9143
9144/*===========================================================================
9145 * FUNCTION : addStreamConfig
9146 *
9147 * DESCRIPTION: adds the stream configuration to the array
9148 *
9149 * PARAMETERS :
9150 * @available_stream_configs : pointer to stream configuration array
9151 * @scalar_format : scalar format
9152 * @dim : configuration dimension
9153 * @config_type : input or output configuration type
9154 *
9155 * RETURN : NONE
9156 *==========================================================================*/
9157void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9158 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9159{
9160 available_stream_configs.add(scalar_format);
9161 available_stream_configs.add(dim.width);
9162 available_stream_configs.add(dim.height);
9163 available_stream_configs.add(config_type);
9164}
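/*
 * Each call appends one (format, width, height, direction) quadruple, so the
 * flattened array ends up looking like (values illustrative only):
 *   { HAL_PIXEL_FORMAT_BLOB, 4000, 3000, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
 *     HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
 *     ... }
 */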
9165
9166/*===========================================================================
9167 * FUNCTION   : supportBurstCapture
9168 *
9169 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9170 *
9171 * PARAMETERS :
9172 * @cameraId : camera Id
9173 *
9174 * RETURN : true if camera supports BURST_CAPTURE
9175 * false otherwise
9176 *==========================================================================*/
9177bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9178{
9179 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9180 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9181 const int32_t highResWidth = 3264;
9182 const int32_t highResHeight = 2448;
9183
9184 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9185 // Maximum resolution images cannot be captured at >= 10fps
9186 // -> not supporting BURST_CAPTURE
9187 return false;
9188 }
9189
9190 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9191 // Maximum resolution images can be captured at >= 20fps
9192 // --> supporting BURST_CAPTURE
9193 return true;
9194 }
9195
9196 // Find the smallest highRes resolution, or largest resolution if there is none
9197 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9198 MAX_SIZES_CNT);
9199 size_t highRes = 0;
9200 while ((highRes + 1 < totalCnt) &&
9201 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9202 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9203 highResWidth * highResHeight)) {
9204 highRes++;
9205 }
9206 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9207 return true;
9208 } else {
9209 return false;
9210 }
9211}
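/*
 * Worked example (hypothetical numbers): if the full-resolution min frame duration
 * is 60 ms (between the 50 ms and 100 ms bounds), the code falls through to the
 * loop and finds the smallest size that is still >= 3264x2448 (or the largest size
 * if none is). If that size can be captured with a min duration of <= 50 ms
 * (>= 20 fps), BURST_CAPTURE is advertised.
 */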
9212
9213/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009214 * FUNCTION : getPDStatIndex
9215 *
9216 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9217 *
9218 * PARAMETERS :
9219 * @caps : camera capabilities
9220 *
9221 * RETURN : int32_t type
9222 * non-negative - on success
9223 * -1 - on failure
9224 *==========================================================================*/
9225int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9226 if (nullptr == caps) {
9227 return -1;
9228 }
9229
9230 uint32_t metaRawCount = caps->meta_raw_channel_count;
9231 int32_t ret = -1;
9232 for (size_t i = 0; i < metaRawCount; i++) {
9233 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9234 ret = i;
9235 break;
9236 }
9237 }
9238
9239 return ret;
9240}
9241
9242/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009243 * FUNCTION : initStaticMetadata
9244 *
9245 * DESCRIPTION: initialize the static metadata
9246 *
9247 * PARAMETERS :
9248 * @cameraId : camera Id
9249 *
9250 * RETURN : int32_t type of status
9251 * 0 -- success
9252 * non-zero failure code
9253 *==========================================================================*/
9254int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9255{
9256 int rc = 0;
9257 CameraMetadata staticInfo;
9258 size_t count = 0;
9259 bool limitedDevice = false;
9260 char prop[PROPERTY_VALUE_MAX];
9261 bool supportBurst = false;
9262
9263 supportBurst = supportBurstCapture(cameraId);
9264
9265    /* If the sensor is a YUV or mono sensor (no raw support), if per-frame control is
9266     * not guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9267     * advertised as a LIMITED device */
9268 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9269 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9270 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9271 !supportBurst;
9272
9273 uint8_t supportedHwLvl = limitedDevice ?
9274 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009275#ifndef USE_HAL_3_3
9276 // LEVEL_3 - This device will support level 3.
9277 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9278#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009279 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009280#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009281
9282 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9283 &supportedHwLvl, 1);
9284
9285 bool facingBack = false;
9286 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9287 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9288 facingBack = true;
9289 }
9290 /*HAL 3 only*/
9291 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9292 &gCamCapability[cameraId]->min_focus_distance, 1);
9293
9294 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9295 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9296
9297 /*should be using focal lengths but sensor doesn't provide that info now*/
9298 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9299 &gCamCapability[cameraId]->focal_length,
9300 1);
9301
9302 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9303 gCamCapability[cameraId]->apertures,
9304 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9305
9306 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9307 gCamCapability[cameraId]->filter_densities,
9308 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9309
9310
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009311 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9312 size_t mode_count =
9313 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9314 for (size_t i = 0; i < mode_count; i++) {
9315 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9316 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009317 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009318 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009319
9320 int32_t lens_shading_map_size[] = {
9321 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9322 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9323 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9324 lens_shading_map_size,
9325 sizeof(lens_shading_map_size)/sizeof(int32_t));
9326
9327 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9328 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9329
9330 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9331 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9332
9333 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9334 &gCamCapability[cameraId]->max_frame_duration, 1);
9335
9336 camera_metadata_rational baseGainFactor = {
9337 gCamCapability[cameraId]->base_gain_factor.numerator,
9338 gCamCapability[cameraId]->base_gain_factor.denominator};
9339 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9340 &baseGainFactor, 1);
9341
9342 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9343 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9344
9345 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9346 gCamCapability[cameraId]->pixel_array_size.height};
9347 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9348 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9349
9350 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9351 gCamCapability[cameraId]->active_array_size.top,
9352 gCamCapability[cameraId]->active_array_size.width,
9353 gCamCapability[cameraId]->active_array_size.height};
9354 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9355 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9356
9357 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9358 &gCamCapability[cameraId]->white_level, 1);
9359
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009360 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9361 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9362 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009363 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009364 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009365
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009366#ifndef USE_HAL_3_3
9367 bool hasBlackRegions = false;
9368 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9369 LOGW("black_region_count: %d is bounded to %d",
9370 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9371 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9372 }
9373 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9374 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9375 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9376 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9377 }
9378 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9379 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9380 hasBlackRegions = true;
9381 }
9382#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9384 &gCamCapability[cameraId]->flash_charge_duration, 1);
9385
9386 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9387 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9388
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009389 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9390 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9391 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9393 &timestampSource, 1);
9394
Thierry Strudel54dc9782017-02-15 12:12:10 -08009395 //update histogram vendor data
9396 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009397 &gCamCapability[cameraId]->histogram_size, 1);
9398
Thierry Strudel54dc9782017-02-15 12:12:10 -08009399 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009400 &gCamCapability[cameraId]->max_histogram_count, 1);
9401
Shuzhen Wang14415f52016-11-16 18:26:18 -08009402 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9403    //so that the app can request fewer bins than the maximum supported.
9404 std::vector<int32_t> histBins;
9405 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9406 histBins.push_back(maxHistBins);
9407 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9408 (maxHistBins & 0x1) == 0) {
9409 histBins.push_back(maxHistBins >> 1);
9410 maxHistBins >>= 1;
9411 }
9412 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9413 histBins.data(), histBins.size());
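    /*
     * Worked example (hypothetical values): with max_histogram_count = 256 and
     * MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the advertised list would be {256, 128, 64};
     * halving stops once the next value would drop below the minimum or the current
     * count is odd.
     */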
9414
Thierry Strudel3d639192016-09-09 11:52:26 -07009415 int32_t sharpness_map_size[] = {
9416 gCamCapability[cameraId]->sharpness_map_size.width,
9417 gCamCapability[cameraId]->sharpness_map_size.height};
9418
9419 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9420 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9421
9422 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9423 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9424
Emilian Peev0f3c3162017-03-15 12:57:46 +00009425 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9426 if (0 <= indexPD) {
9427 // Advertise PD stats data as part of the Depth capabilities
9428 int32_t depthWidth =
9429 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9430 int32_t depthHeight =
9431 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009432 int32_t depthStride =
9433 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009434 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9435 assert(0 < depthSamplesCount);
9436 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9437 &depthSamplesCount, 1);
9438
9439 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9440 depthHeight,
9441 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9442 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9443 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9444 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9445 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9446
9447 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9448 depthHeight, 33333333,
9449 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9450 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9451 depthMinDuration,
9452 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9453
9454 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9455 depthHeight, 0,
9456 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9457 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9458 depthStallDuration,
9459 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9460
9461 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9462 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009463
9464 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9465 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9466 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009467 }
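    /*
     * Worked example (hypothetical PD stats dimensions): for a 640x480 PD map,
     * depthStride = 640 * 2 = 1280 bytes and
     * depthSamplesCount = (640 * 480 * 2) / 16 = 38400 samples advertised for the
     * blob-based depth stream.
     */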
9468
Thierry Strudel3d639192016-09-09 11:52:26 -07009469 int32_t scalar_formats[] = {
9470 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9471 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9472 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9473 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9474 HAL_PIXEL_FORMAT_RAW10,
9475 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009476 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9477 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9478 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009479
9480 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9481 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9482 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9483 count, MAX_SIZES_CNT, available_processed_sizes);
9484 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9485 available_processed_sizes, count * 2);
9486
9487 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9488 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9489 makeTable(gCamCapability[cameraId]->raw_dim,
9490 count, MAX_SIZES_CNT, available_raw_sizes);
9491 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9492 available_raw_sizes, count * 2);
9493
9494 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9495 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9496 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9497 count, MAX_SIZES_CNT, available_fps_ranges);
9498 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9499 available_fps_ranges, count * 2);
9500
9501 camera_metadata_rational exposureCompensationStep = {
9502 gCamCapability[cameraId]->exp_compensation_step.numerator,
9503 gCamCapability[cameraId]->exp_compensation_step.denominator};
9504 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9505 &exposureCompensationStep, 1);
9506
9507 Vector<uint8_t> availableVstabModes;
9508 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9509 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009510 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009511 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009512 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009513 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009514 count = IS_TYPE_MAX;
9515 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9516 for (size_t i = 0; i < count; i++) {
9517 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9518 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9519 eisSupported = true;
9520 break;
9521 }
9522 }
9523 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009524 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9525 }
9526 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9527 availableVstabModes.array(), availableVstabModes.size());
9528
9529 /*HAL 1 and HAL 3 common*/
9530 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9531 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9532 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009533 // Cap the max zoom to the max preferred value
9534 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009535 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9536 &maxZoom, 1);
9537
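    /*
     * Worked example (hypothetical zoom table): if the last zoom_ratio_tbl entry is
     * 800 (8x relative to the 100 base step), maxZoom = MIN(800 / 100, MAX_PREFERRED_ZOOM_RATIO),
     * so the advertised maximum digital zoom is 8x unless the preferred cap is lower.
     */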
9538 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9539 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9540
9541 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9542 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9543 max3aRegions[2] = 0; /* AF not supported */
9544 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9545 max3aRegions, 3);
9546
9547 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9548 memset(prop, 0, sizeof(prop));
9549 property_get("persist.camera.facedetect", prop, "1");
9550 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9551 LOGD("Support face detection mode: %d",
9552 supportedFaceDetectMode);
9553
9554 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009555    /* supported mode should be OFF if the max number of faces is 0 */
9556 if (maxFaces <= 0) {
9557 supportedFaceDetectMode = 0;
9558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009559 Vector<uint8_t> availableFaceDetectModes;
9560 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9561 if (supportedFaceDetectMode == 1) {
9562 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9563 } else if (supportedFaceDetectMode == 2) {
9564 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9565 } else if (supportedFaceDetectMode == 3) {
9566 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9567 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9568 } else {
9569 maxFaces = 0;
9570 }
9571 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9572 availableFaceDetectModes.array(),
9573 availableFaceDetectModes.size());
9574 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9575 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009576 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9577 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9578 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009579
9580 int32_t exposureCompensationRange[] = {
9581 gCamCapability[cameraId]->exposure_compensation_min,
9582 gCamCapability[cameraId]->exposure_compensation_max};
9583 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9584 exposureCompensationRange,
9585 sizeof(exposureCompensationRange)/sizeof(int32_t));
9586
9587 uint8_t lensFacing = (facingBack) ?
9588 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9589 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9590
9591 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9592 available_thumbnail_sizes,
9593 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9594
9595    /* all sizes will be combined into this tag */
9596 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9597 /*android.scaler.availableStreamConfigurations*/
9598 Vector<int32_t> available_stream_configs;
9599 cam_dimension_t active_array_dim;
9600 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9601 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009602
9603    /* Advertise the list of supported input dimensions based on the property below.
9604    By default, all sizes up to 5MP will be advertised.
9605 Note that the setprop resolution format should be WxH.
9606 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9607 To list all supported sizes, setprop needs to be set with "0x0" */
9608 cam_dimension_t minInputSize = {2592,1944}; //5MP
9609 memset(prop, 0, sizeof(prop));
9610 property_get("persist.camera.input.minsize", prop, "2592x1944");
9611 if (strlen(prop) > 0) {
9612 char *saveptr = NULL;
9613 char *token = strtok_r(prop, "x", &saveptr);
9614 if (token != NULL) {
9615 minInputSize.width = atoi(token);
9616 }
9617 token = strtok_r(NULL, "x", &saveptr);
9618 if (token != NULL) {
9619 minInputSize.height = atoi(token);
9620 }
9621 }
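    /*
     * Example: with persist.camera.input.minsize set to "1280x720", strtok_r() splits
     * on 'x' and minInputSize becomes {1280, 720}; the default "2592x1944" (5MP) is
     * used when the property is untouched, and "0x0" advertises every supported size.
     */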
9622
Thierry Strudel3d639192016-09-09 11:52:26 -07009623 /* Add input/output stream configurations for each scalar formats*/
9624 for (size_t j = 0; j < scalar_formats_count; j++) {
9625 switch (scalar_formats[j]) {
9626 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9627 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9628 case HAL_PIXEL_FORMAT_RAW10:
9629 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9630 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9631 addStreamConfig(available_stream_configs, scalar_formats[j],
9632 gCamCapability[cameraId]->raw_dim[i],
9633 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9634 }
9635 break;
9636 case HAL_PIXEL_FORMAT_BLOB:
9637 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9638 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9639 addStreamConfig(available_stream_configs, scalar_formats[j],
9640 gCamCapability[cameraId]->picture_sizes_tbl[i],
9641 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9642 }
9643 break;
9644 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9645 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9646 default:
9647 cam_dimension_t largest_picture_size;
9648 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9649 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9650 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9651 addStreamConfig(available_stream_configs, scalar_formats[j],
9652 gCamCapability[cameraId]->picture_sizes_tbl[i],
9653 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009654                /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009655 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9656 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009657 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9658 >= minInputSize.width) || (gCamCapability[cameraId]->
9659 picture_sizes_tbl[i].height >= minInputSize.height)) {
9660 addStreamConfig(available_stream_configs, scalar_formats[j],
9661 gCamCapability[cameraId]->picture_sizes_tbl[i],
9662 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9663 }
9664 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009665 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009666
Thierry Strudel3d639192016-09-09 11:52:26 -07009667 break;
9668 }
9669 }
9670
9671 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9672 available_stream_configs.array(), available_stream_configs.size());
9673 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9674 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9675
9676 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9677 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9678
9679 /* android.scaler.availableMinFrameDurations */
9680 Vector<int64_t> available_min_durations;
9681 for (size_t j = 0; j < scalar_formats_count; j++) {
9682 switch (scalar_formats[j]) {
9683 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9684 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9685 case HAL_PIXEL_FORMAT_RAW10:
9686 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9687 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9688 available_min_durations.add(scalar_formats[j]);
9689 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9690 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9691 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9692 }
9693 break;
9694 default:
9695 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9696 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9697 available_min_durations.add(scalar_formats[j]);
9698 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9699 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9700 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9701 }
9702 break;
9703 }
9704 }
9705 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9706 available_min_durations.array(), available_min_durations.size());
9707
9708 Vector<int32_t> available_hfr_configs;
9709 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9710 int32_t fps = 0;
9711 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9712 case CAM_HFR_MODE_60FPS:
9713 fps = 60;
9714 break;
9715 case CAM_HFR_MODE_90FPS:
9716 fps = 90;
9717 break;
9718 case CAM_HFR_MODE_120FPS:
9719 fps = 120;
9720 break;
9721 case CAM_HFR_MODE_150FPS:
9722 fps = 150;
9723 break;
9724 case CAM_HFR_MODE_180FPS:
9725 fps = 180;
9726 break;
9727 case CAM_HFR_MODE_210FPS:
9728 fps = 210;
9729 break;
9730 case CAM_HFR_MODE_240FPS:
9731 fps = 240;
9732 break;
9733 case CAM_HFR_MODE_480FPS:
9734 fps = 480;
9735 break;
9736 case CAM_HFR_MODE_OFF:
9737 case CAM_HFR_MODE_MAX:
9738 default:
9739 break;
9740 }
9741
9742 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9743 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9744 /* For each HFR frame rate, need to advertise one variable fps range
9745 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9746 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9747 * set by the app. When video recording is started, [120, 120] is
9748 * set. This way sensor configuration does not change when recording
9749 * is started */
9750
9751 /* (width, height, fps_min, fps_max, batch_size_max) */
9752 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9753 j < MAX_SIZES_CNT; j++) {
9754 available_hfr_configs.add(
9755 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9756 available_hfr_configs.add(
9757 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9758 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9759 available_hfr_configs.add(fps);
9760 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9761
9762 /* (width, height, fps_min, fps_max, batch_size_max) */
9763 available_hfr_configs.add(
9764 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9765 available_hfr_configs.add(
9766 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9767 available_hfr_configs.add(fps);
9768 available_hfr_configs.add(fps);
9769 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9770 }
9771 }
9772 }
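    /*
     * Worked example: for a 120 fps HFR entry at a hypothetical 1920x1080 dimension,
     * two 5-tuples are appended, (1920, 1080, PREVIEW_FPS_FOR_HFR, 120, 120 / PREVIEW_FPS_FOR_HFR)
     * and (1920, 1080, 120, 120, 120 / PREVIEW_FPS_FOR_HFR), matching the variable and
     * fixed fps ranges described in the comment above.
     */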
9773    //Advertise HFR capability only if the property is enabled (it is enabled by default)
9774 memset(prop, 0, sizeof(prop));
9775 property_get("persist.camera.hal3hfr.enable", prop, "1");
9776 uint8_t hfrEnable = (uint8_t)atoi(prop);
9777
9778 if(hfrEnable && available_hfr_configs.array()) {
9779 staticInfo.update(
9780 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9781 available_hfr_configs.array(), available_hfr_configs.size());
9782 }
9783
9784 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9785 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9786 &max_jpeg_size, 1);
9787
9788 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9789 size_t size = 0;
9790 count = CAM_EFFECT_MODE_MAX;
9791 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9792 for (size_t i = 0; i < count; i++) {
9793 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9794 gCamCapability[cameraId]->supported_effects[i]);
9795 if (NAME_NOT_FOUND != val) {
9796 avail_effects[size] = (uint8_t)val;
9797 size++;
9798 }
9799 }
9800 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9801 avail_effects,
9802 size);
9803
9804 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9805 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9806 size_t supported_scene_modes_cnt = 0;
9807 count = CAM_SCENE_MODE_MAX;
9808 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9809 for (size_t i = 0; i < count; i++) {
9810 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9811 CAM_SCENE_MODE_OFF) {
9812 int val = lookupFwkName(SCENE_MODES_MAP,
9813 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9814 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009815
Thierry Strudel3d639192016-09-09 11:52:26 -07009816 if (NAME_NOT_FOUND != val) {
9817 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9818 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9819 supported_scene_modes_cnt++;
9820 }
9821 }
9822 }
9823 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9824 avail_scene_modes,
9825 supported_scene_modes_cnt);
9826
9827 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9828 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9829 supported_scene_modes_cnt,
9830 CAM_SCENE_MODE_MAX,
9831 scene_mode_overrides,
9832 supported_indexes,
9833 cameraId);
9834
9835 if (supported_scene_modes_cnt == 0) {
9836 supported_scene_modes_cnt = 1;
9837 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9838 }
9839
9840 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9841 scene_mode_overrides, supported_scene_modes_cnt * 3);
9842
9843 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9844 ANDROID_CONTROL_MODE_AUTO,
9845 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9846 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9847 available_control_modes,
9848 3);
9849
9850 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9851 size = 0;
9852 count = CAM_ANTIBANDING_MODE_MAX;
9853 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9854 for (size_t i = 0; i < count; i++) {
9855 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9856 gCamCapability[cameraId]->supported_antibandings[i]);
9857 if (NAME_NOT_FOUND != val) {
9858 avail_antibanding_modes[size] = (uint8_t)val;
9859 size++;
9860 }
9861
9862 }
9863 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9864 avail_antibanding_modes,
9865 size);
9866
9867 uint8_t avail_abberation_modes[] = {
9868 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9869 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9870 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9871 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9872 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9873 if (0 == count) {
9874        // If no aberration correction modes are available for a device, only advertise the OFF mode
9875 size = 1;
9876 } else {
9877        // If count is non-zero then at least one of the FAST or HIGH_QUALITY modes is supported.
9878        // So, advertise all 3 modes if at least one mode is supported, as per the
9879        // new Android M requirement.
9880 size = 3;
9881 }
9882 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9883 avail_abberation_modes,
9884 size);
9885
9886 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9887 size = 0;
9888 count = CAM_FOCUS_MODE_MAX;
9889 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9890 for (size_t i = 0; i < count; i++) {
9891 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9892 gCamCapability[cameraId]->supported_focus_modes[i]);
9893 if (NAME_NOT_FOUND != val) {
9894 avail_af_modes[size] = (uint8_t)val;
9895 size++;
9896 }
9897 }
9898 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9899 avail_af_modes,
9900 size);
9901
9902 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9903 size = 0;
9904 count = CAM_WB_MODE_MAX;
9905 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9906 for (size_t i = 0; i < count; i++) {
9907 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9908 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9909 gCamCapability[cameraId]->supported_white_balances[i]);
9910 if (NAME_NOT_FOUND != val) {
9911 avail_awb_modes[size] = (uint8_t)val;
9912 size++;
9913 }
9914 }
9915 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9916 avail_awb_modes,
9917 size);
9918
9919 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9920 count = CAM_FLASH_FIRING_LEVEL_MAX;
9921 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9922 count);
9923 for (size_t i = 0; i < count; i++) {
9924 available_flash_levels[i] =
9925 gCamCapability[cameraId]->supported_firing_levels[i];
9926 }
9927 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9928 available_flash_levels, count);
9929
9930 uint8_t flashAvailable;
9931 if (gCamCapability[cameraId]->flash_available)
9932 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9933 else
9934 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9935 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9936 &flashAvailable, 1);
9937
9938 Vector<uint8_t> avail_ae_modes;
9939 count = CAM_AE_MODE_MAX;
9940 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9941 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009942 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9943 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9944 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9945 }
9946 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009947 }
9948 if (flashAvailable) {
9949 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9950 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9951 }
9952 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9953 avail_ae_modes.array(),
9954 avail_ae_modes.size());
9955
9956 int32_t sensitivity_range[2];
9957 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9958 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9959 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9960 sensitivity_range,
9961 sizeof(sensitivity_range) / sizeof(int32_t));
9962
9963 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9964 &gCamCapability[cameraId]->max_analog_sensitivity,
9965 1);
9966
9967 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9968 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9969 &sensor_orientation,
9970 1);
9971
9972 int32_t max_output_streams[] = {
9973 MAX_STALLING_STREAMS,
9974 MAX_PROCESSED_STREAMS,
9975 MAX_RAW_STREAMS};
9976 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9977 max_output_streams,
9978 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9979
9980 uint8_t avail_leds = 0;
9981 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9982 &avail_leds, 0);
9983
9984 uint8_t focus_dist_calibrated;
9985 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9986 gCamCapability[cameraId]->focus_dist_calibrated);
9987 if (NAME_NOT_FOUND != val) {
9988 focus_dist_calibrated = (uint8_t)val;
9989 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9990 &focus_dist_calibrated, 1);
9991 }
9992
9993 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9994 size = 0;
9995 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9996 MAX_TEST_PATTERN_CNT);
9997 for (size_t i = 0; i < count; i++) {
9998 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9999 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10000 if (NAME_NOT_FOUND != testpatternMode) {
10001 avail_testpattern_modes[size] = testpatternMode;
10002 size++;
10003 }
10004 }
10005 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10006 avail_testpattern_modes,
10007 size);
10008
10009 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10010 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10011 &max_pipeline_depth,
10012 1);
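    /*
     * Illustrative arithmetic (values hypothetical): with MAX_INFLIGHT_REQUESTS = 4,
     * EMPTY_PIPELINE_DELAY = 2 and FRAME_SKIP_DELAY = 0, the advertised
     * ANDROID_REQUEST_PIPELINE_MAX_DEPTH would be 4 + 2 + 0 = 6.
     */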
10013
10014 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10015 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10016 &partial_result_count,
10017 1);
10018
10019 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10020 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10021
10022 Vector<uint8_t> available_capabilities;
10023 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10024 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10025 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10026 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10027 if (supportBurst) {
10028 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10029 }
10030 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10031 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10032 if (hfrEnable && available_hfr_configs.array()) {
10033 available_capabilities.add(
10034 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10035 }
10036
10037 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10038 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10039 }
10040 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10041 available_capabilities.array(),
10042 available_capabilities.size());
10043
10044    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10045    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
10046 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10047 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10048
10049 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10050 &aeLockAvailable, 1);
10051
10052    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10053    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10054 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10055 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10056
10057 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10058 &awbLockAvailable, 1);
10059
10060 int32_t max_input_streams = 1;
10061 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10062 &max_input_streams,
10063 1);
10064
10065 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10066 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10067 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10068 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10069 HAL_PIXEL_FORMAT_YCbCr_420_888};
10070 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10071 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
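    /*
     * Decoded, the map above advertises two reprocessing paths:
     *   HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED input -> { BLOB, YCbCr_420_888 } outputs
     *   HAL_PIXEL_FORMAT_YCbCr_420_888 input          -> { BLOB, YCbCr_420_888 } outputs
     */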
10072
10073 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10074 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10075 &max_latency,
10076 1);
10077
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010078#ifndef USE_HAL_3_3
10079 int32_t isp_sensitivity_range[2];
10080 isp_sensitivity_range[0] =
10081 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10082 isp_sensitivity_range[1] =
10083 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10084 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10085 isp_sensitivity_range,
10086 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10087#endif
10088
Thierry Strudel3d639192016-09-09 11:52:26 -070010089 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10090 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10091 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10092 available_hot_pixel_modes,
10093 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10094
10095 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10096 ANDROID_SHADING_MODE_FAST,
10097 ANDROID_SHADING_MODE_HIGH_QUALITY};
10098 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10099 available_shading_modes,
10100 3);
10101
10102 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10103 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10104 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10105 available_lens_shading_map_modes,
10106 2);
10107
10108 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10109 ANDROID_EDGE_MODE_FAST,
10110 ANDROID_EDGE_MODE_HIGH_QUALITY,
10111 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10112 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10113 available_edge_modes,
10114 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10115
10116 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10117 ANDROID_NOISE_REDUCTION_MODE_FAST,
10118 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10119 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10120 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10121 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10122 available_noise_red_modes,
10123 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10124
10125 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10126 ANDROID_TONEMAP_MODE_FAST,
10127 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10128 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10129 available_tonemap_modes,
10130 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10131
10132 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10133 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10134 available_hot_pixel_map_modes,
10135 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10136
10137 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10138 gCamCapability[cameraId]->reference_illuminant1);
10139 if (NAME_NOT_FOUND != val) {
10140 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10141 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10142 }
10143
10144 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10145 gCamCapability[cameraId]->reference_illuminant2);
10146 if (NAME_NOT_FOUND != val) {
10147 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10148 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10149 }
10150
10151 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10152 (void *)gCamCapability[cameraId]->forward_matrix1,
10153 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10154
10155 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10156 (void *)gCamCapability[cameraId]->forward_matrix2,
10157 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10158
10159 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10160 (void *)gCamCapability[cameraId]->color_transform1,
10161 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10162
10163 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10164 (void *)gCamCapability[cameraId]->color_transform2,
10165 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10166
10167 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10168 (void *)gCamCapability[cameraId]->calibration_transform1,
10169 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10170
10171 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10172 (void *)gCamCapability[cameraId]->calibration_transform2,
10173 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10174
10175 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10176 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10177 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10178 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10179 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10180 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10181 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10182 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10183 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10184 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10185 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10186 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10187 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10188 ANDROID_JPEG_GPS_COORDINATES,
10189 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10190 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10191 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10192 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10193 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10194 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10195 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10196 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10197 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10198 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010199#ifndef USE_HAL_3_3
10200 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10201#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010202 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010203 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010204 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10205 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010206 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010207 /* DevCamDebug metadata request_keys_basic */
10208 DEVCAMDEBUG_META_ENABLE,
10209 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010210 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010211 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010212 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010213 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010214 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010215 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010216
10217 size_t request_keys_cnt =
10218 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10219 Vector<int32_t> available_request_keys;
10220 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10221 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10222 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10223 }
10224
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010225 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010226 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010227 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010228 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010229 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010230 }
10231
Thierry Strudel3d639192016-09-09 11:52:26 -070010232 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10233 available_request_keys.array(), available_request_keys.size());
10234
10235 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10236 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10237 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10238 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10239 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10240 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10241 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10242 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10243 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10244 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10245 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10246 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10247 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10248 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10249 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10250 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10251 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010252 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010253 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10254 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10255 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010256 ANDROID_STATISTICS_FACE_SCORES,
10257#ifndef USE_HAL_3_3
10258 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10259#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010260 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010261 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010262 // DevCamDebug metadata result_keys_basic
10263 DEVCAMDEBUG_META_ENABLE,
10264 // DevCamDebug metadata result_keys AF
10265 DEVCAMDEBUG_AF_LENS_POSITION,
10266 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10267 DEVCAMDEBUG_AF_TOF_DISTANCE,
10268 DEVCAMDEBUG_AF_LUMA,
10269 DEVCAMDEBUG_AF_HAF_STATE,
10270 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10271 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10272 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10273 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10274 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10275 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10276 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10277 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10278 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10279 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10280 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10281 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10282 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10283 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10284 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10285 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10286 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10287 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10288 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10289 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10290 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10291 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10292 // DevCamDebug metadata result_keys AEC
10293 DEVCAMDEBUG_AEC_TARGET_LUMA,
10294 DEVCAMDEBUG_AEC_COMP_LUMA,
10295 DEVCAMDEBUG_AEC_AVG_LUMA,
10296 DEVCAMDEBUG_AEC_CUR_LUMA,
10297 DEVCAMDEBUG_AEC_LINECOUNT,
10298 DEVCAMDEBUG_AEC_REAL_GAIN,
10299 DEVCAMDEBUG_AEC_EXP_INDEX,
10300 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010301 // DevCamDebug metadata result_keys zzHDR
10302 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10303 DEVCAMDEBUG_AEC_L_LINECOUNT,
10304 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10305 DEVCAMDEBUG_AEC_S_LINECOUNT,
10306 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10307 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10308 // DevCamDebug metadata result_keys ADRC
10309 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10310 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10311 DEVCAMDEBUG_AEC_GTM_RATIO,
10312 DEVCAMDEBUG_AEC_LTM_RATIO,
10313 DEVCAMDEBUG_AEC_LA_RATIO,
10314 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010315 // DevCamDebug metadata result_keys AEC MOTION
10316 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10317 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10318 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010319 // DevCamDebug metadata result_keys AWB
10320 DEVCAMDEBUG_AWB_R_GAIN,
10321 DEVCAMDEBUG_AWB_G_GAIN,
10322 DEVCAMDEBUG_AWB_B_GAIN,
10323 DEVCAMDEBUG_AWB_CCT,
10324 DEVCAMDEBUG_AWB_DECISION,
10325 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010326 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10327 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10328 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010329 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010330 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010331 };
10332
Thierry Strudel3d639192016-09-09 11:52:26 -070010333 size_t result_keys_cnt =
10334 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10335
10336 Vector<int32_t> available_result_keys;
10337 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10338 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10339 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10340 }
10341 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10342 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10343 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10344 }
10345 if (supportedFaceDetectMode == 1) {
10346 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10347 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10348 } else if ((supportedFaceDetectMode == 2) ||
10349 (supportedFaceDetectMode == 3)) {
10350 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10351 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10352 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010353#ifndef USE_HAL_3_3
10354 if (hasBlackRegions) {
10355 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10356 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10357 }
10358#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010359
10360 if (gExposeEnableZslKey) {
10361 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010362 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010363 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10364 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010365 }
10366
Thierry Strudel3d639192016-09-09 11:52:26 -070010367 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10368 available_result_keys.array(), available_result_keys.size());
10369
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010370 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010371 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10372 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10373 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10374 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10375 ANDROID_SCALER_CROPPING_TYPE,
10376 ANDROID_SYNC_MAX_LATENCY,
10377 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10378 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10379 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10380 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10381 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10382 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10383 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10384 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10385 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10386 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10387 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10388 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10389 ANDROID_LENS_FACING,
10390 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10391 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10392 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10393 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10394 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10395 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10396 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10397 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10398 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10399 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10400 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10401 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10402 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10403 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10404 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10405 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10406 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10407 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10408 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10409 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010410 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010411 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10412 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10413 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10414 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10415 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10416 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10417 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10418 ANDROID_CONTROL_AVAILABLE_MODES,
10419 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10420 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10421 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10422 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010423 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10424#ifndef USE_HAL_3_3
10425 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10426 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10427#endif
10428 };
10429
10430 Vector<int32_t> available_characteristics_keys;
10431 available_characteristics_keys.appendArray(characteristics_keys_basic,
10432 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10433#ifndef USE_HAL_3_3
10434 if (hasBlackRegions) {
10435 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10436 }
10437#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010438
10439 if (0 <= indexPD) {
10440 int32_t depthKeys[] = {
10441 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10442 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10443 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10444 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10445 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10446 };
10447 available_characteristics_keys.appendArray(depthKeys,
10448 sizeof(depthKeys) / sizeof(depthKeys[0]));
10449 }
10450
Thierry Strudel3d639192016-09-09 11:52:26 -070010451 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010452 available_characteristics_keys.array(),
10453 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010454
10455 /*available stall durations depend on the hw + sw and will be different for different devices */
10456 /*have to add for raw after implementation*/
10457 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10458 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10459
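    // A note on the layout being built below: each entry in the stall-duration list is a
    // (format, width, height, stall duration in ns) quadruple, i.e. the flattened layout
    // that ANDROID_SCALER_AVAILABLE_STALL_DURATIONS expects.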
10460 Vector<int64_t> available_stall_durations;
10461 for (uint32_t j = 0; j < stall_formats_count; j++) {
10462 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10463 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10464 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10465 available_stall_durations.add(stall_formats[j]);
10466 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10467 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10468 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10469 }
10470 } else {
10471 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10472 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10473 available_stall_durations.add(stall_formats[j]);
10474 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10475 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10476 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10477 }
10478 }
10479 }
10480 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10481 available_stall_durations.array(),
10482 available_stall_durations.size());
10483
10484 //QCAMERA3_OPAQUE_RAW
10485 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10486 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
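    // Choose the advertised opaque RAW format from the sensor bit depth (inferred from
    // white_level) and the packing mode reported by the backend (legacy QCOM vs. MIPI packing).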
10487 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10488 case LEGACY_RAW:
10489 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10490 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10491 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10492 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10493 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10494 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10495 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10496 break;
10497 case MIPI_RAW:
10498 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10499 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10500 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10501 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10502 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10503 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10504 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10505 break;
10506 default:
10507 LOGE("unknown opaque_raw_format %d",
10508 gCamCapability[cameraId]->opaque_raw_fmt);
10509 break;
10510 }
10511 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10512
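    // Publish one (width, height, stride) triplet per supported RAW dimension so that clients
    // can interpret QCAMERA3_OPAQUE_RAW_STRIDES alongside the RAW size table.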
10513 Vector<int32_t> strides;
10514 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10515 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10516 cam_stream_buf_plane_info_t buf_planes;
10517 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10518 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10519 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10520 &gCamCapability[cameraId]->padding_info, &buf_planes);
10521 strides.add(buf_planes.plane_info.mp[0].stride);
10522 }
10523 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10524 strides.size());
10525
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010526 //TBD: remove the following line once backend advertises zzHDR in feature mask
10527 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010528 //Video HDR default
10529 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10530 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010531 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010532 int32_t vhdr_mode[] = {
10533 QCAMERA3_VIDEO_HDR_MODE_OFF,
10534 QCAMERA3_VIDEO_HDR_MODE_ON};
10535
10536 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10537 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10538 vhdr_mode, vhdr_mode_count);
10539 }
10540
Thierry Strudel3d639192016-09-09 11:52:26 -070010541 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10542 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10543 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10544
10545 uint8_t isMonoOnly =
10546 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10547 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10548 &isMonoOnly, 1);
10549
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010550#ifndef USE_HAL_3_3
10551 Vector<int32_t> opaque_size;
10552 for (size_t j = 0; j < scalar_formats_count; j++) {
10553 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10554 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10555 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10556 cam_stream_buf_plane_info_t buf_planes;
10557
10558 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10559 &gCamCapability[cameraId]->padding_info, &buf_planes);
10560
10561 if (rc == 0) {
10562 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10563 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10564 opaque_size.add(buf_planes.plane_info.frame_len);
10565                 } else {
10566 LOGE("raw frame calculation failed!");
10567 }
10568 }
10569 }
10570 }
10571
10572 if ((opaque_size.size() > 0) &&
10573 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10574 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10575 else
10576        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10577#endif
10578
Thierry Strudel04e026f2016-10-10 11:27:36 -070010579 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
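        // Map each backend IR mode to its framework enum via lookupFwkName(); modes without a
        // framework mapping are dropped from the advertised list. The same pattern is used for
        // the instant-AEC, binning-correction, exposure-meter and ISO mode lists below.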
10580 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10581 size = 0;
10582 count = CAM_IR_MODE_MAX;
10583 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10584 for (size_t i = 0; i < count; i++) {
10585 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10586 gCamCapability[cameraId]->supported_ir_modes[i]);
10587 if (NAME_NOT_FOUND != val) {
10588 avail_ir_modes[size] = (int32_t)val;
10589 size++;
10590 }
10591 }
10592 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10593 avail_ir_modes, size);
10594 }
10595
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010596 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10597 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10598 size = 0;
10599 count = CAM_AEC_CONVERGENCE_MAX;
10600 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10601 for (size_t i = 0; i < count; i++) {
10602 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10603 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10604 if (NAME_NOT_FOUND != val) {
10605 available_instant_aec_modes[size] = (int32_t)val;
10606 size++;
10607 }
10608 }
10609 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10610 available_instant_aec_modes, size);
10611 }
10612
Thierry Strudel54dc9782017-02-15 12:12:10 -080010613 int32_t sharpness_range[] = {
10614 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10615 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10616 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10617
10618 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10619 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10620 size = 0;
10621 count = CAM_BINNING_CORRECTION_MODE_MAX;
10622 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10623 for (size_t i = 0; i < count; i++) {
10624 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10625 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10626 gCamCapability[cameraId]->supported_binning_modes[i]);
10627 if (NAME_NOT_FOUND != val) {
10628 avail_binning_modes[size] = (int32_t)val;
10629 size++;
10630 }
10631 }
10632 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10633 avail_binning_modes, size);
10634 }
10635
10636 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10637 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10638 size = 0;
10639 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10640 for (size_t i = 0; i < count; i++) {
10641 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10642 gCamCapability[cameraId]->supported_aec_modes[i]);
10643 if (NAME_NOT_FOUND != val)
10644 available_aec_modes[size++] = val;
10645 }
10646 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10647 available_aec_modes, size);
10648 }
10649
10650 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10651 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10652 size = 0;
10653 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10654 for (size_t i = 0; i < count; i++) {
10655 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10656 gCamCapability[cameraId]->supported_iso_modes[i]);
10657 if (NAME_NOT_FOUND != val)
10658 available_iso_modes[size++] = val;
10659 }
10660 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10661 available_iso_modes, size);
10662 }
10663
10664 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010665 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010666 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10667 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10668 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10669
10670 int32_t available_saturation_range[4];
10671 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10672 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10673 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10674 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10675 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10676 available_saturation_range, 4);
10677
10678 uint8_t is_hdr_values[2];
10679 is_hdr_values[0] = 0;
10680 is_hdr_values[1] = 1;
10681 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10682 is_hdr_values, 2);
10683
10684 float is_hdr_confidence_range[2];
10685 is_hdr_confidence_range[0] = 0.0;
10686 is_hdr_confidence_range[1] = 1.0;
10687 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10688 is_hdr_confidence_range, 2);
10689
Emilian Peev0a972ef2017-03-16 10:25:53 +000010690 size_t eepromLength = strnlen(
10691 reinterpret_cast<const char *>(
10692 gCamCapability[cameraId]->eeprom_version_info),
10693 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10694 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010695 char easelInfo[] = ",E:N";
10696 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
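        // Append an Easel marker to the EEPROM version string: ",E-ver" when an Easel module is
        // present on this device, ",E:N" otherwise (only if the marker still fits in the buffer).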
10697 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10698 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010699 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010700 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010701 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010702 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010703 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10704 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10705 }
10706
Thierry Strudel3d639192016-09-09 11:52:26 -070010707 gStaticMetadata[cameraId] = staticInfo.release();
10708 return rc;
10709}
10710
10711/*===========================================================================
10712 * FUNCTION : makeTable
10713 *
10714 * DESCRIPTION: make a table of sizes
10715 *
10716 * PARAMETERS :
10717 * @dimTable/@size/@max_size : source dimension array, its valid entry count, and the copy limit
10718 * @sizeTable : output array receiving interleaved width/height pairs
10719 *==========================================================================*/
10720void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10721 size_t max_size, int32_t *sizeTable)
10722{
10723 size_t j = 0;
10724 if (size > max_size) {
10725 size = max_size;
10726 }
10727 for (size_t i = 0; i < size; i++) {
10728 sizeTable[j] = dimTable[i].width;
10729 sizeTable[j+1] = dimTable[i].height;
10730 j+=2;
10731 }
10732}
10733
10734/*===========================================================================
10735 * FUNCTION : makeFPSTable
10736 *
10737 * DESCRIPTION: make a table of fps ranges
10738 *
10739 * PARAMETERS :
10740 * @fpsTable/@size/@max_size/@fpsRangesTable : source fps ranges, count, copy limit, output array
10741 *==========================================================================*/
10742void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10743 size_t max_size, int32_t *fpsRangesTable)
10744{
10745 size_t j = 0;
10746 if (size > max_size) {
10747 size = max_size;
10748 }
10749 for (size_t i = 0; i < size; i++) {
10750 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10751 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10752 j+=2;
10753 }
10754}
10755
10756/*===========================================================================
10757 * FUNCTION : makeOverridesList
10758 *
10759 * DESCRIPTION: make a list of scene mode overrides
10760 *
10761 * PARAMETERS :
10762 * @overridesTable/@size/@max_size : per-scene-mode override table, its count, and the copy limit
10763 * @overridesList/@supported_indexes/@camera_id : output list, fwk-supported scene mode indexes, camera id
10764 *==========================================================================*/
10765void QCamera3HardwareInterface::makeOverridesList(
10766 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10767 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10768{
10769    /* The daemon provides a list of overrides for all scene modes.
10770    However, we should send the framework only the overrides for the scene modes
10771    that it actually supports. */
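    /* Each emitted override is a 3-tuple (ae_mode, awb_mode, af_mode) per supported scene mode,
     * matching the flattened ANDROID_CONTROL_SCENE_MODE_OVERRIDES layout. */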
10772 size_t j = 0;
10773 if (size > max_size) {
10774 size = max_size;
10775 }
10776 size_t focus_count = CAM_FOCUS_MODE_MAX;
10777 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10778 focus_count);
10779 for (size_t i = 0; i < size; i++) {
10780 bool supt = false;
10781 size_t index = supported_indexes[i];
10782 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10783 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10784 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10785 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10786 overridesTable[index].awb_mode);
10787 if (NAME_NOT_FOUND != val) {
10788 overridesList[j+1] = (uint8_t)val;
10789 }
10790 uint8_t focus_override = overridesTable[index].af_mode;
10791 for (size_t k = 0; k < focus_count; k++) {
10792 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10793 supt = true;
10794 break;
10795 }
10796 }
10797 if (supt) {
10798 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10799 focus_override);
10800 if (NAME_NOT_FOUND != val) {
10801 overridesList[j+2] = (uint8_t)val;
10802 }
10803 } else {
10804 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10805 }
10806 j+=3;
10807 }
10808}
10809
10810/*===========================================================================
10811 * FUNCTION : filterJpegSizes
10812 *
10813 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that the
10814 * active array can be downscaled to within the given downscale factor
10815 *
10816 * PARAMETERS : @jpegSizes (output), @processedSizes/@processedSizesCnt (input list),
10817 * @maxCount, @active_array_size, @downscale_factor (filter constraints)
10818 * RETURN : length of jpegSizes array
10819 *==========================================================================*/
10820
10821size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10822 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10823 uint8_t downscale_factor)
10824{
10825 if (0 == downscale_factor) {
10826 downscale_factor = 1;
10827 }
10828
10829 int32_t min_width = active_array_size.width / downscale_factor;
10830 int32_t min_height = active_array_size.height / downscale_factor;
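    // Keep only processed sizes that are reachable from the full active array within the
    // supported downscale factor; anything smaller would require a larger downscale ratio.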
10831 size_t jpegSizesCnt = 0;
10832 if (processedSizesCnt > maxCount) {
10833 processedSizesCnt = maxCount;
10834 }
10835 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10836 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10837 jpegSizes[jpegSizesCnt] = processedSizes[i];
10838 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10839 jpegSizesCnt += 2;
10840 }
10841 }
10842 return jpegSizesCnt;
10843}
10844
10845/*===========================================================================
10846 * FUNCTION : computeNoiseModelEntryS
10847 *
10848 * DESCRIPTION: function to map a given sensitivity to the S noise
10849 * model parameters in the DNG noise model.
10850 *
10851 * PARAMETERS : sens : the sensor sensitivity
10852 *
10853 * RETURN : S (sensor amplification) noise
10854 *
10855 *==========================================================================*/
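// A brief note on the model (assuming the standard Android DNG noise profile form used by
// ANDROID_SENSOR_NOISE_PROFILE): noise is modeled with variance(x) = S * x + O for a pixel
// value x, and S below is approximated as a linear function of sensitivity, clamped to >= 0.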
10856double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10857 double s = gCamCapability[mCameraId]->gradient_S * sens +
10858 gCamCapability[mCameraId]->offset_S;
10859 return ((s < 0.0) ? 0.0 : s);
10860}
10861
10862/*===========================================================================
10863 * FUNCTION : computeNoiseModelEntryO
10864 *
10865 * DESCRIPTION: function to map a given sensitivity to the O noise
10866 * model parameters in the DNG noise model.
10867 *
10868 * PARAMETERS : sens : the sensor sensitivity
10869 *
10870 * RETURN : O (sensor readout) noise
10871 *
10872 *==========================================================================*/
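// O (the readout noise term) below grows quadratically with sensitivity; once the requested
// sensitivity exceeds the maximum analog sensitivity, the additional digital gain also scales
// the readout term by digital_gain^2, as implemented in the function body.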
10873double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10874 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10875 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10876 1.0 : (1.0 * sens / max_analog_sens);
10877 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10878 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10879 return ((o < 0.0) ? 0.0 : o);
10880}
10881
10882/*===========================================================================
10883 * FUNCTION : getSensorSensitivity
10884 *
10885 * DESCRIPTION: convert iso_mode to an integer value
10886 *
10887 * PARAMETERS : iso_mode : the ISO mode supported by the sensor
10888 *
10889 * RETURN : sensitivity corresponding to the given ISO mode, or -1 if the mode is unknown
10890 *
10891 *==========================================================================*/
10892int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10893{
10894 int32_t sensitivity;
10895
10896 switch (iso_mode) {
10897 case CAM_ISO_MODE_100:
10898 sensitivity = 100;
10899 break;
10900 case CAM_ISO_MODE_200:
10901 sensitivity = 200;
10902 break;
10903 case CAM_ISO_MODE_400:
10904 sensitivity = 400;
10905 break;
10906 case CAM_ISO_MODE_800:
10907 sensitivity = 800;
10908 break;
10909 case CAM_ISO_MODE_1600:
10910 sensitivity = 1600;
10911 break;
10912 default:
10913 sensitivity = -1;
10914 break;
10915 }
10916 return sensitivity;
10917}
10918
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010919int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010920 if (gEaselManagerClient == nullptr) {
10921 gEaselManagerClient = EaselManagerClient::create();
10922 if (gEaselManagerClient == nullptr) {
10923 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10924 return -ENODEV;
10925 }
10926 }
10927
10928 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010929 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10930 // to connect to Easel.
10931 bool doNotpowerOnEasel =
10932 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10933
10934 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010935 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10936 return OK;
10937 }
10938
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010939 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010940 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010941 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010942 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010943 return res;
10944 }
10945
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010946 EaselManagerClientOpened = true;
10947
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010948 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010949 if (res != OK) {
10950 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10951 }
10952
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010953 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010954 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010955
10956 // Expose enableZsl key only when HDR+ mode is enabled.
10957 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010958 }
10959
10960 return OK;
10961}
10962
Thierry Strudel3d639192016-09-09 11:52:26 -070010963/*===========================================================================
10964 * FUNCTION : getCamInfo
10965 *
10966 * DESCRIPTION: query camera capabilities
10967 *
10968 * PARAMETERS :
10969 * @cameraId : camera Id
10970 * @info : camera info struct to be filled in with camera capabilities
10971 *
10972 * RETURN : int type of status
10973 * NO_ERROR -- success
10974 * non-zero failure code
10975 *==========================================================================*/
10976int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10977 struct camera_info *info)
10978{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010979 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010980 int rc = 0;
10981
10982 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010983
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010984 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010985 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010986 rc = initHdrPlusClientLocked();
10987 if (rc != OK) {
10988 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10989 pthread_mutex_unlock(&gCamLock);
10990 return rc;
10991 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010992 }
10993
Thierry Strudel3d639192016-09-09 11:52:26 -070010994 if (NULL == gCamCapability[cameraId]) {
10995 rc = initCapabilities(cameraId);
10996 if (rc < 0) {
10997 pthread_mutex_unlock(&gCamLock);
10998 return rc;
10999 }
11000 }
11001
11002 if (NULL == gStaticMetadata[cameraId]) {
11003 rc = initStaticMetadata(cameraId);
11004 if (rc < 0) {
11005 pthread_mutex_unlock(&gCamLock);
11006 return rc;
11007 }
11008 }
11009
11010 switch(gCamCapability[cameraId]->position) {
11011 case CAM_POSITION_BACK:
11012 case CAM_POSITION_BACK_AUX:
11013 info->facing = CAMERA_FACING_BACK;
11014 break;
11015
11016 case CAM_POSITION_FRONT:
11017 case CAM_POSITION_FRONT_AUX:
11018 info->facing = CAMERA_FACING_FRONT;
11019 break;
11020
11021 default:
11022 LOGE("Unknown position type %d for camera id:%d",
11023 gCamCapability[cameraId]->position, cameraId);
11024 rc = -1;
11025 break;
11026 }
11027
11028
11029 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011030#ifndef USE_HAL_3_3
11031 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11032#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011033 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011034#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011035 info->static_camera_characteristics = gStaticMetadata[cameraId];
11036
11037 //For now assume both cameras can operate independently.
11038 info->conflicting_devices = NULL;
11039 info->conflicting_devices_length = 0;
11040
11041 //resource cost is 100 * MIN(1.0, m/M),
11042    //where m is the throughput requirement with the maximum stream configuration
11043    //and M is the maximum CPP throughput.
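    // Rough worked example with hypothetical numbers: a 12 MP active array at 30 fps, taking
    // MAX_PROCESSED_STREAMS = 2 for illustration and a 1 GP/s CPP budget, gives
    // ratio ~= 2 * 12e6 * 30 / 1e9 = 0.72, i.e. a reported resource cost of 72.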
11044 float max_fps = 0.0;
11045 for (uint32_t i = 0;
11046 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11047 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11048 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11049 }
11050 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11051 gCamCapability[cameraId]->active_array_size.width *
11052 gCamCapability[cameraId]->active_array_size.height * max_fps /
11053 gCamCapability[cameraId]->max_pixel_bandwidth;
11054 info->resource_cost = 100 * MIN(1.0, ratio);
11055 LOGI("camera %d resource cost is %d", cameraId,
11056 info->resource_cost);
11057
11058 pthread_mutex_unlock(&gCamLock);
11059 return rc;
11060}
11061
11062/*===========================================================================
11063 * FUNCTION : translateCapabilityToMetadata
11064 *
11065 * DESCRIPTION: translate the capability into camera_metadata_t
11066 *
11067 * PARAMETERS : type of the request
11068 *
11069 *
11070 * RETURN : success: camera_metadata_t*
11071 * failure: NULL
11072 *
11073 *==========================================================================*/
11074camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11075{
11076 if (mDefaultMetadata[type] != NULL) {
11077 return mDefaultMetadata[type];
11078 }
11079 //first time we are handling this request
11080 //fill up the metadata structure using the wrapper class
11081 CameraMetadata settings;
11082 //translate from cam_capability_t to camera_metadata_tag_t
11083 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11084 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11085 int32_t defaultRequestID = 0;
11086 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11087
11088 /* OIS disable */
11089 char ois_prop[PROPERTY_VALUE_MAX];
11090 memset(ois_prop, 0, sizeof(ois_prop));
11091 property_get("persist.camera.ois.disable", ois_prop, "0");
11092 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11093
11094 /* Force video to use OIS */
11095 char videoOisProp[PROPERTY_VALUE_MAX];
11096 memset(videoOisProp, 0, sizeof(videoOisProp));
11097 property_get("persist.camera.ois.video", videoOisProp, "1");
11098 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011099
11100 // Hybrid AE enable/disable
11101 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11102 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11103 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011104 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011105
Thierry Strudel3d639192016-09-09 11:52:26 -070011106 uint8_t controlIntent = 0;
11107 uint8_t focusMode;
11108 uint8_t vsMode;
11109 uint8_t optStabMode;
11110 uint8_t cacMode;
11111 uint8_t edge_mode;
11112 uint8_t noise_red_mode;
11113 uint8_t tonemap_mode;
11114 bool highQualityModeEntryAvailable = FALSE;
11115 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011116 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011117 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11118 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011119 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011120 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011121 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011122
Thierry Strudel3d639192016-09-09 11:52:26 -070011123 switch (type) {
11124 case CAMERA3_TEMPLATE_PREVIEW:
11125 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11126 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11127 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11128 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11129 edge_mode = ANDROID_EDGE_MODE_FAST;
11130 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11131 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11132 break;
11133 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11134 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11135 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11136 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11137 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11138 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11139 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11140 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11141 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11142 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11143 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11144 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11145 highQualityModeEntryAvailable = TRUE;
11146 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11147 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11148 fastModeEntryAvailable = TRUE;
11149 }
11150 }
11151 if (highQualityModeEntryAvailable) {
11152 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11153 } else if (fastModeEntryAvailable) {
11154 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11155 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011156 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11157 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11158 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011159 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011160 break;
11161 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11162 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11163 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11164 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011165 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11166 edge_mode = ANDROID_EDGE_MODE_FAST;
11167 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11168 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11169 if (forceVideoOis)
11170 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11171 break;
11172 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11173 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11174 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11175 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011176 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11177 edge_mode = ANDROID_EDGE_MODE_FAST;
11178 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11179 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11180 if (forceVideoOis)
11181 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11182 break;
11183 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11184 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11185 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11186 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11187 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11188 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11189 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11190 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11191 break;
11192 case CAMERA3_TEMPLATE_MANUAL:
11193 edge_mode = ANDROID_EDGE_MODE_FAST;
11194 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11195 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11196 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11197 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11198 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11199 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11200 break;
11201 default:
11202 edge_mode = ANDROID_EDGE_MODE_FAST;
11203 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11204 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11205 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11206 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11207 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11208 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11209 break;
11210 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011211    // Set CAC to OFF if the underlying device doesn't support it
11212 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11213 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11214 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011215 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11216 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11217 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11218 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11219 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11220 }
11221 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011222 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011223 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011224
11225 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11226 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11227 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11228 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11229 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11230 || ois_disable)
11231 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11232 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011233 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011234
11235 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11236 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11237
11238 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11239 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11240
11241 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11242 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11243
11244 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11245 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11246
11247 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11248 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11249
11250 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11251 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11252
11253 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11254 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11255
11256 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11257 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11258
11259 /*flash*/
11260 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11261 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11262
11263 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11264 settings.update(ANDROID_FLASH_FIRING_POWER,
11265 &flashFiringLevel, 1);
11266
11267 /* lens */
11268 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11269 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11270
11271 if (gCamCapability[mCameraId]->filter_densities_count) {
11272 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11273 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11274 gCamCapability[mCameraId]->filter_densities_count);
11275 }
11276
11277 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11278 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11279
Thierry Strudel3d639192016-09-09 11:52:26 -070011280 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11281 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11282
11283 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11284 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11285
11286 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11287 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11288
11289 /* face detection (default to OFF) */
11290 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11291 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11292
Thierry Strudel54dc9782017-02-15 12:12:10 -080011293 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11294 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011295
11296 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11297 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11298
11299 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11300 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11301
Thierry Strudel3d639192016-09-09 11:52:26 -070011302
11303 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11304 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11305
11306    /* Exposure time (default to the minimum supported exposure time) */
11307 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11308 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11309
11310 /* frame duration */
11311 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11312 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11313
11314 /* sensitivity */
11315 static const int32_t default_sensitivity = 100;
11316 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011317#ifndef USE_HAL_3_3
11318 static const int32_t default_isp_sensitivity =
11319 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11320 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11321#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011322
11323 /*edge mode*/
11324 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11325
11326 /*noise reduction mode*/
11327 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11328
11329 /*color correction mode*/
11330 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11331 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11332
11333 /*transform matrix mode*/
11334 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11335
11336 int32_t scaler_crop_region[4];
11337 scaler_crop_region[0] = 0;
11338 scaler_crop_region[1] = 0;
11339 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11340 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11341 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11342
11343 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11344 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11345
11346 /*focus distance*/
11347 float focus_distance = 0.0;
11348 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11349
11350 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011351 /* Restrict template max_fps to 30 */
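    /* Example with a hypothetical fps table {[15,30], [30,30], [7,60]}: [7,60] is skipped by the
     * 30 fps template cap; the preview/still/ZSL templates then pick the widest remaining range
     * ([15,30]), while the video-style templates pick the highest fixed range ([30,30]). */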
Thierry Strudel3d639192016-09-09 11:52:26 -070011352 float max_range = 0.0;
11353 float max_fixed_fps = 0.0;
11354 int32_t fps_range[2] = {0, 0};
11355 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11356 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011357 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11358 TEMPLATE_MAX_PREVIEW_FPS) {
11359 continue;
11360 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011361 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11362 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11363 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11364 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11365 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11366 if (range > max_range) {
11367 fps_range[0] =
11368 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11369 fps_range[1] =
11370 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11371 max_range = range;
11372 }
11373 } else {
11374 if (range < 0.01 && max_fixed_fps <
11375 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11376 fps_range[0] =
11377 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11378 fps_range[1] =
11379 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11380 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11381 }
11382 }
11383 }
11384 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11385
11386 /*precapture trigger*/
11387 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11388 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11389
11390 /*af trigger*/
11391 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11392 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11393
11394 /* ae & af regions */
11395 int32_t active_region[] = {
11396 gCamCapability[mCameraId]->active_array_size.left,
11397 gCamCapability[mCameraId]->active_array_size.top,
11398 gCamCapability[mCameraId]->active_array_size.left +
11399 gCamCapability[mCameraId]->active_array_size.width,
11400 gCamCapability[mCameraId]->active_array_size.top +
11401 gCamCapability[mCameraId]->active_array_size.height,
11402 0};
11403 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11404 sizeof(active_region) / sizeof(active_region[0]));
11405 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11406 sizeof(active_region) / sizeof(active_region[0]));
11407
11408 /* black level lock */
11409 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11410 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11411
Thierry Strudel3d639192016-09-09 11:52:26 -070011412 //special defaults for manual template
11413 if (type == CAMERA3_TEMPLATE_MANUAL) {
11414 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11415 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11416
11417 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11418 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11419
11420 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11421 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11422
11423 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11424 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11425
11426 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11427 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11428
11429 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11430 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11431 }
11432
11433
11434 /* TNR
11435 * We use this location to decide for which template types TNR will be set.
11436 * TNR is enabled if either the preview or the video stream requires it.
11437 * This is not to be confused with per-stream linking; that decision
11438 * is still made per session and is handled as part of stream configuration.
11439 */
11440 uint8_t tnr_enable = 0;
11441
11442 if (m_bTnrPreview || m_bTnrVideo) {
11443
11444 switch (type) {
11445 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11446 tnr_enable = 1;
11447 break;
11448
11449 default:
11450 tnr_enable = 0;
11451 break;
11452 }
11453
11454 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11455 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11456 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11457
11458 LOGD("TNR:%d with process plate %d for template:%d",
11459 tnr_enable, tnr_process_type, type);
11460 }
11461
11462 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011463 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011464 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11465
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011466 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011467 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11468
Shuzhen Wang920ea402017-05-03 08:49:39 -070011469 uint8_t related_camera_id = mCameraId;
11470 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011471
11472 /* CDS default */
11473 char prop[PROPERTY_VALUE_MAX];
11474 memset(prop, 0, sizeof(prop));
11475 property_get("persist.camera.CDS", prop, "Auto");
11476 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11477 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11478 if (CAM_CDS_MODE_MAX == cds_mode) {
11479 cds_mode = CAM_CDS_MODE_AUTO;
11480 }
11481
11482 /* Disabling CDS in templates which have TNR enabled */
11483 if (tnr_enable)
11484 cds_mode = CAM_CDS_MODE_OFF;
11485
11486 int32_t mode = cds_mode;
11487 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
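// Illustrative note: setting the persist.camera.CDS property to any value defined in
// CDS_MAP selects that mode here; unrecognized values fall back to CAM_CDS_MODE_AUTO,
// and CDS is forced off whenever TNR is enabled for this template.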
Thierry Strudel04e026f2016-10-10 11:27:36 -070011488
Thierry Strudel269c81a2016-10-12 12:13:59 -070011489 /* Manual Convergence AEC Speed is disabled by default*/
11490 float default_aec_speed = 0;
11491 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11492
11493 /* Manual Convergence AWB Speed is disabled by default*/
11494 float default_awb_speed = 0;
11495 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11496
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011497 // Set instant AEC to normal convergence by default
11498 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11499 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11500
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011501 if (gExposeEnableZslKey) {
11502 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011503 int32_t postview = 0;
11504 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011505 int32_t continuousZslCapture = 0;
11506 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011507 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE.
11508 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE) ? 0 : 1;
11509 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11510
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011511 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11512 // hybrid ae is enabled for 3rd party app HDR+.
11513 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11514 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11515 hybrid_ae = 1;
11516 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011517 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011518 /* hybrid ae */
11519 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011520
Thierry Strudel3d639192016-09-09 11:52:26 -070011521 mDefaultMetadata[type] = settings.release();
11522
11523 return mDefaultMetadata[type];
11524}
11525
11526/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011527 * FUNCTION : getExpectedFrameDuration
11528 *
11529 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11530 * duration
11531 *
11532 * PARAMETERS :
11533 * @request : request settings
11534 * @frameDuration : The maximum frame duration in nanoseconds
11535 *
11536 * RETURN : None
11537 *==========================================================================*/
11538void QCamera3HardwareInterface::getExpectedFrameDuration(
11539 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11540 if (nullptr == frameDuration) {
11541 return;
11542 }
11543
11544 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11545 find_camera_metadata_ro_entry(request,
11546 ANDROID_SENSOR_EXPOSURE_TIME,
11547 &e);
11548 if (e.count > 0) {
11549 *frameDuration = e.data.i64[0];
11550 }
11551 find_camera_metadata_ro_entry(request,
11552 ANDROID_SENSOR_FRAME_DURATION,
11553 &e);
11554 if (e.count > 0) {
11555 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11556 }
11557}
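// Worked example (illustrative): if a manual request carries ANDROID_SENSOR_EXPOSURE_TIME =
// 33000000 ns and ANDROID_SENSOR_FRAME_DURATION = 50000000 ns, the function above reports
// 50000000 ns, i.e. the larger of the two values that are present.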
11558
11559/*===========================================================================
11560 * FUNCTION : calculateMaxExpectedDuration
11561 *
11562 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11563 * current camera settings.
11564 *
11565 * PARAMETERS :
11566 * @request : request settings
11567 *
11568 * RETURN : Expected frame duration in nanoseconds.
11569 *==========================================================================*/
11570nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11571 const camera_metadata_t *request) {
11572 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11573 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11574 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11575 if (e.count == 0) {
11576 return maxExpectedDuration;
11577 }
11578
11579 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11580 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11581 }
11582
11583 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11584 return maxExpectedDuration;
11585 }
11586
11587 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11588 if (e.count == 0) {
11589 return maxExpectedDuration;
11590 }
11591
11592 switch (e.data.u8[0]) {
11593 case ANDROID_CONTROL_AE_MODE_OFF:
11594 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11595 break;
11596 default:
11597 find_camera_metadata_ro_entry(request,
11598 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11599 &e);
11600 if (e.count > 1) {
11601 maxExpectedDuration = 1e9 / e.data.i32[0]; // bound by the slowest (min) fps of the range
11602 }
11603 break;
11604 }
11605
11606 return maxExpectedDuration;
11607}
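// Worked example (illustrative): in AUTO mode with an AE target fps range of [15, 30],
// the expected duration is 1e9 / 15, roughly 66.7 ms, since the slowest fps bounds the
// frame duration; with ANDROID_CONTROL_MODE_OFF or AE mode OFF the sensor exposure and
// frame-duration tags are used instead, and kDefaultExpectedDuration is returned when
// none of these tags are present.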
11608
11609/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011610 * FUNCTION : setFrameParameters
11611 *
11612 * DESCRIPTION: set parameters per frame as requested in the metadata from
11613 * framework
11614 *
11615 * PARAMETERS :
11616 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011617 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011618 * @blob_request: Whether this request is a blob request or not
11619 *
11620 * RETURN : success: NO_ERROR
11621 * failure:
11622 *==========================================================================*/
11623int QCamera3HardwareInterface::setFrameParameters(
11624 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011625 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011626 int blob_request,
11627 uint32_t snapshotStreamId)
11628{
11629 /*translate from camera_metadata_t type to parm_type_t*/
11630 int rc = 0;
11631 int32_t hal_version = CAM_HAL_V3;
11632
11633 clear_metadata_buffer(mParameters);
11634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11635 LOGE("Failed to set hal version in the parameters");
11636 return BAD_VALUE;
11637 }
11638
11639 /*we need to update the frame number in the parameters*/
11640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11641 request->frame_number)) {
11642 LOGE("Failed to set the frame number in the parameters");
11643 return BAD_VALUE;
11644 }
11645
11646 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011647 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011648 LOGE("Failed to set stream type mask in the parameters");
11649 return BAD_VALUE;
11650 }
11651
11652 if (mUpdateDebugLevel) {
11653 uint32_t dummyDebugLevel = 0;
11654 /* The value of dummyDebugLevel is irrelevant. On
11655 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11657 dummyDebugLevel)) {
11658 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11659 return BAD_VALUE;
11660 }
11661 mUpdateDebugLevel = false;
11662 }
11663
11664 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011665 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011666 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11667 if (blob_request)
11668 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11669 }
11670
11671 return rc;
11672}
11673
11674/*===========================================================================
11675 * FUNCTION : setReprocParameters
11676 *
11677 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11678 * return it.
11679 *
11680 * PARAMETERS :
11681 * @request : request that needs to be serviced
11682 *
11683 * RETURN : success: NO_ERROR
11684 * failure:
11685 *==========================================================================*/
11686int32_t QCamera3HardwareInterface::setReprocParameters(
11687 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11688 uint32_t snapshotStreamId)
11689{
11690 /*translate from camera_metadata_t type to parm_type_t*/
11691 int rc = 0;
11692
11693 if (NULL == request->settings){
11694 LOGE("Reprocess settings cannot be NULL");
11695 return BAD_VALUE;
11696 }
11697
11698 if (NULL == reprocParam) {
11699 LOGE("Invalid reprocessing metadata buffer");
11700 return BAD_VALUE;
11701 }
11702 clear_metadata_buffer(reprocParam);
11703
11704 /*we need to update the frame number in the parameters*/
11705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11706 request->frame_number)) {
11707 LOGE("Failed to set the frame number in the parameters");
11708 return BAD_VALUE;
11709 }
11710
11711 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11712 if (rc < 0) {
11713 LOGE("Failed to translate reproc request");
11714 return rc;
11715 }
11716
11717 CameraMetadata frame_settings;
11718 frame_settings = request->settings;
11719 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11720 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11721 int32_t *crop_count =
11722 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11723 int32_t *crop_data =
11724 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11725 int32_t *roi_map =
11726 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11727 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11728 cam_crop_data_t crop_meta;
11729 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11730 crop_meta.num_of_streams = 1;
11731 crop_meta.crop_info[0].crop.left = crop_data[0];
11732 crop_meta.crop_info[0].crop.top = crop_data[1];
11733 crop_meta.crop_info[0].crop.width = crop_data[2];
11734 crop_meta.crop_info[0].crop.height = crop_data[3];
11735
11736 crop_meta.crop_info[0].roi_map.left =
11737 roi_map[0];
11738 crop_meta.crop_info[0].roi_map.top =
11739 roi_map[1];
11740 crop_meta.crop_info[0].roi_map.width =
11741 roi_map[2];
11742 crop_meta.crop_info[0].roi_map.height =
11743 roi_map[3];
11744
11745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11746 rc = BAD_VALUE;
11747 }
11748 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11749 request->input_buffer->stream,
11750 crop_meta.crop_info[0].crop.left,
11751 crop_meta.crop_info[0].crop.top,
11752 crop_meta.crop_info[0].crop.width,
11753 crop_meta.crop_info[0].crop.height);
11754 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11755 request->input_buffer->stream,
11756 crop_meta.crop_info[0].roi_map.left,
11757 crop_meta.crop_info[0].roi_map.top,
11758 crop_meta.crop_info[0].roi_map.width,
11759 crop_meta.crop_info[0].roi_map.height);
11760 } else {
11761 LOGE("Invalid reprocess crop count %d!", *crop_count);
11762 }
11763 } else {
11764 LOGE("No crop data from matching output stream");
11765 }
11766
11767 /* These settings are not needed for regular requests so handle them specially for
11768 reprocess requests; information needed for EXIF tags */
11769 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11770 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11771 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11772 if (NAME_NOT_FOUND != val) {
11773 uint32_t flashMode = (uint32_t)val;
11774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11775 rc = BAD_VALUE;
11776 }
11777 } else {
11778 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11779 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11780 }
11781 } else {
11782 LOGH("No flash mode in reprocess settings");
11783 }
11784
11785 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11786 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11788 rc = BAD_VALUE;
11789 }
11790 } else {
11791 LOGH("No flash state in reprocess settings");
11792 }
11793
11794 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11795 uint8_t *reprocessFlags =
11796 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11798 *reprocessFlags)) {
11799 rc = BAD_VALUE;
11800 }
11801 }
11802
Thierry Strudel54dc9782017-02-15 12:12:10 -080011803 // Add exif debug data to internal metadata
11804 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11805 mm_jpeg_debug_exif_params_t *debug_params =
11806 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11807 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11808 // AE
11809 if (debug_params->ae_debug_params_valid == TRUE) {
11810 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11811 debug_params->ae_debug_params);
11812 }
11813 // AWB
11814 if (debug_params->awb_debug_params_valid == TRUE) {
11815 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11816 debug_params->awb_debug_params);
11817 }
11818 // AF
11819 if (debug_params->af_debug_params_valid == TRUE) {
11820 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11821 debug_params->af_debug_params);
11822 }
11823 // ASD
11824 if (debug_params->asd_debug_params_valid == TRUE) {
11825 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11826 debug_params->asd_debug_params);
11827 }
11828 // Stats
11829 if (debug_params->stats_debug_params_valid == TRUE) {
11830 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11831 debug_params->stats_debug_params);
11832 }
11833 // BE Stats
11834 if (debug_params->bestats_debug_params_valid == TRUE) {
11835 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11836 debug_params->bestats_debug_params);
11837 }
11838 // BHIST
11839 if (debug_params->bhist_debug_params_valid == TRUE) {
11840 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11841 debug_params->bhist_debug_params);
11842 }
11843 // 3A Tuning
11844 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11845 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11846 debug_params->q3a_tuning_debug_params);
11847 }
11848 }
11849
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011850 // Add metadata which reprocess needs
11851 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11852 cam_reprocess_info_t *repro_info =
11853 (cam_reprocess_info_t *)frame_settings.find
11854 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011855 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011856 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011857 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011858 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011859 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011860 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011861 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011862 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011863 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011864 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011865 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011866 repro_info->pipeline_flip);
11867 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11868 repro_info->af_roi);
11869 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11870 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011871 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11872 then the CAM_INTF_PARM_ROTATION metadata has already been added in
11873 translateToHalMetadata and the HAL needs to keep that new rotation
11874 metadata. Otherwise, the old rotation info saved in the vendor tag
11875 would be used */
11876 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11877 CAM_INTF_PARM_ROTATION, reprocParam) {
11878 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11879 } else {
11880 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011881 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011882 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011883 }
11884
11885 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11886 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11887 roi.width and roi.height are the final JPEG size.
11888 For now, the HAL only checks this for reprocess requests */
11889 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11890 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11891 uint8_t *enable =
11892 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11893 if (*enable == TRUE) {
11894 int32_t *crop_data =
11895 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11896 cam_stream_crop_info_t crop_meta;
11897 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11898 crop_meta.stream_id = 0;
11899 crop_meta.crop.left = crop_data[0];
11900 crop_meta.crop.top = crop_data[1];
11901 crop_meta.crop.width = crop_data[2];
11902 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011903 // The JPEG crop roi should match cpp output size
11904 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11905 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11906 crop_meta.roi_map.left = 0;
11907 crop_meta.roi_map.top = 0;
11908 crop_meta.roi_map.width = cpp_crop->crop.width;
11909 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011910 }
11911 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11912 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011913 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011914 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011915 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11916 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011917 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011918 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11919
11920 // Add JPEG scale information
11921 cam_dimension_t scale_dim;
11922 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11923 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11924 int32_t *roi =
11925 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11926 scale_dim.width = roi[2];
11927 scale_dim.height = roi[3];
11928 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11929 scale_dim);
11930 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11931 scale_dim.width, scale_dim.height, mCameraId);
11932 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011933 }
11934 }
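// Illustrative example (hypothetical values): a reprocess request could set
// QCAMERA3_JPEG_ENCODE_CROP_ENABLE = TRUE, QCAMERA3_JPEG_ENCODE_CROP_RECT =
// {0, 0, 4000, 3000} (left, top, width, height) and QCAMERA3_JPEG_ENCODE_CROP_ROI =
// {0, 0, 1920, 1080}, so the cropped region is scaled to a final 1920x1080 JPEG by the
// HW encoder.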
11935
11936 return rc;
11937}
11938
11939/*===========================================================================
11940 * FUNCTION : saveRequestSettings
11941 *
11942 * DESCRIPTION: Add any settings that might have changed to the request settings
11943 * and save the settings to be applied on the frame
11944 *
11945 * PARAMETERS :
11946 * @jpegMetadata : the extracted and/or modified jpeg metadata
11947 * @request : request with initial settings
11948 *
11949 * RETURN :
11950 * camera_metadata_t* : pointer to the saved request settings
11951 *==========================================================================*/
11952camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11953 const CameraMetadata &jpegMetadata,
11954 camera3_capture_request_t *request)
11955{
11956 camera_metadata_t *resultMetadata;
11957 CameraMetadata camMetadata;
11958 camMetadata = request->settings;
11959
11960 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11961 int32_t thumbnail_size[2];
11962 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11963 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11964 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11965 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11966 }
11967
11968 if (request->input_buffer != NULL) {
11969 uint8_t reprocessFlags = 1;
11970 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11971 (uint8_t*)&reprocessFlags,
11972 sizeof(reprocessFlags));
11973 }
11974
11975 resultMetadata = camMetadata.release();
11976 return resultMetadata;
11977}
11978
11979/*===========================================================================
11980 * FUNCTION : setHalFpsRange
11981 *
11982 * DESCRIPTION: set FPS range parameter
11983 *
11984 *
11985 * PARAMETERS :
11986 * @settings : Metadata from framework
11987 * @hal_metadata: Metadata buffer
11988 *
11989 *
11990 * RETURN : success: NO_ERROR
11991 * failure:
11992 *==========================================================================*/
11993int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11994 metadata_buffer_t *hal_metadata)
11995{
11996 int32_t rc = NO_ERROR;
11997 cam_fps_range_t fps_range;
11998 fps_range.min_fps = (float)
11999 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12000 fps_range.max_fps = (float)
12001 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12002 fps_range.video_min_fps = fps_range.min_fps;
12003 fps_range.video_max_fps = fps_range.max_fps;
12004
12005 LOGD("aeTargetFpsRange fps: [%f %f]",
12006 fps_range.min_fps, fps_range.max_fps);
12007 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12008 * follows:
12009 * ---------------------------------------------------------------|
12010 * Video stream is absent in configure_streams |
12011 * (Camcorder preview before the first video record |
12012 * ---------------------------------------------------------------|
12013 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12014 * | | | vid_min/max_fps|
12015 * ---------------------------------------------------------------|
12016 * NO | [ 30, 240] | 240 | [240, 240] |
12017 * |-------------|-------------|----------------|
12018 * | [240, 240] | 240 | [240, 240] |
12019 * ---------------------------------------------------------------|
12020 * Video stream is present in configure_streams |
12021 * ---------------------------------------------------------------|
12022 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12023 * | | | vid_min/max_fps|
12024 * ---------------------------------------------------------------|
12025 * NO | [ 30, 240] | 240 | [240, 240] |
12026 * (camcorder prev |-------------|-------------|----------------|
12027 * after video rec | [240, 240] | 240 | [240, 240] |
12028 * is stopped) | | | |
12029 * ---------------------------------------------------------------|
12030 * YES | [ 30, 240] | 240 | [240, 240] |
12031 * |-------------|-------------|----------------|
12032 * | [240, 240] | 240 | [240, 240] |
12033 * ---------------------------------------------------------------|
12034 * When Video stream is absent in configure_streams,
12035 * preview fps = sensor_fps / batchsize
12036 * Eg: for 240fps at batchSize 4, preview = 60fps
12037 * for 120fps at batchSize 4, preview = 30fps
12038 *
12039 * When video stream is present in configure_streams, preview fps is as per
12040 * the ratio of preview buffers to video buffers requested in process
12041 * capture request
12042 */
12043 mBatchSize = 0;
12044 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12045 fps_range.min_fps = fps_range.video_max_fps;
12046 fps_range.video_min_fps = fps_range.video_max_fps;
12047 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12048 fps_range.max_fps);
12049 if (NAME_NOT_FOUND != val) {
12050 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12051 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12052 return BAD_VALUE;
12053 }
12054
12055 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12056 /* If batchmode is currently in progress and the fps changes,
12057 * set the flag to restart the sensor */
12058 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12059 (mHFRVideoFps != fps_range.max_fps)) {
12060 mNeedSensorRestart = true;
12061 }
12062 mHFRVideoFps = fps_range.max_fps;
12063 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12064 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12065 mBatchSize = MAX_HFR_BATCH_SIZE;
12066 }
12067 }
12068 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12069
12070 }
12071 } else {
12072 /* HFR mode is a session parameter in the backend/ISP. It should be reset when
12073 * in non-HFR mode */
12074 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12076 return BAD_VALUE;
12077 }
12078 }
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12080 return BAD_VALUE;
12081 }
12082 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12083 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12084 return rc;
12085}
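// Worked example (illustrative): in CONSTRAINED_HIGH_SPEED_MODE with an aeTargetFpsRange
// of [30, 240], min_fps and video_min_fps are raised to 240, the HFR mode is looked up for
// 240 fps, and (assuming PREVIEW_FPS_FOR_HFR is 30) mBatchSize becomes 240 / 30 = 8,
// capped at MAX_HFR_BATCH_SIZE.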
12086
12087/*===========================================================================
12088 * FUNCTION : translateToHalMetadata
12089 *
12090 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12091 *
12092 *
12093 * PARAMETERS :
12094 * @request : request sent from framework
12095 *
12096 *
12097 * RETURN : success: NO_ERROR
12098 * failure:
12099 *==========================================================================*/
12100int QCamera3HardwareInterface::translateToHalMetadata
12101 (const camera3_capture_request_t *request,
12102 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012103 uint32_t snapshotStreamId) {
12104 if (request == nullptr || hal_metadata == nullptr) {
12105 return BAD_VALUE;
12106 }
12107
12108 int64_t minFrameDuration = getMinFrameDuration(request);
12109
12110 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12111 minFrameDuration);
12112}
12113
12114int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12115 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12116 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12117
Thierry Strudel3d639192016-09-09 11:52:26 -070012118 int rc = 0;
12119 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012120 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012121
12122 /* Do not change the order of the following list unless you know what you are
12123 * doing.
12124 * The order is laid out in such a way that parameters in the front of the table
12125 * may be used to override the parameters later in the table. Examples are:
12126 * 1. META_MODE should precede AEC/AWB/AF MODE
12127 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12128 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12129 * 4. Any mode should precede its corresponding settings
12130 */
12131 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12132 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12134 rc = BAD_VALUE;
12135 }
12136 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12137 if (rc != NO_ERROR) {
12138 LOGE("extractSceneMode failed");
12139 }
12140 }
12141
12142 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12143 uint8_t fwk_aeMode =
12144 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12145 uint8_t aeMode;
12146 int32_t redeye;
12147
12148 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12149 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012150 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12151 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012152 } else {
12153 aeMode = CAM_AE_MODE_ON;
12154 }
12155 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12156 redeye = 1;
12157 } else {
12158 redeye = 0;
12159 }
12160
12161 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12162 fwk_aeMode);
12163 if (NAME_NOT_FOUND != val) {
12164 int32_t flashMode = (int32_t)val;
12165 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12166 }
12167
12168 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12170 rc = BAD_VALUE;
12171 }
12172 }
12173
12174 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12175 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12176 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12177 fwk_whiteLevel);
12178 if (NAME_NOT_FOUND != val) {
12179 uint8_t whiteLevel = (uint8_t)val;
12180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12181 rc = BAD_VALUE;
12182 }
12183 }
12184 }
12185
12186 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12187 uint8_t fwk_cacMode =
12188 frame_settings.find(
12189 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12190 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12191 fwk_cacMode);
12192 if (NAME_NOT_FOUND != val) {
12193 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12194 bool entryAvailable = FALSE;
12195 // Check whether Frameworks set CAC mode is supported in device or not
12196 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12197 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12198 entryAvailable = TRUE;
12199 break;
12200 }
12201 }
12202 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12203 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
12204 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12205 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12206 if (entryAvailable == FALSE) {
12207 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12208 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12209 } else {
12210 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12211 // High quality is not supported, so set FAST since the spec says the underlying
12212 // device implementation can be the same for both modes.
12213 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12214 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12215 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12216 // in order to avoid the fps drop due to high quality
12217 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12218 } else {
12219 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12220 }
12221 }
12222 }
12223 LOGD("Final cacMode is %d", cacMode);
12224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12225 rc = BAD_VALUE;
12226 }
12227 } else {
12228 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12229 }
12230 }
12231
Jason Lee84ae9972017-02-24 13:24:24 -080012232 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012233 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012234 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012235 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012236 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12237 fwk_focusMode);
12238 if (NAME_NOT_FOUND != val) {
12239 uint8_t focusMode = (uint8_t)val;
12240 LOGD("set focus mode %d", focusMode);
12241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12242 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12243 rc = BAD_VALUE;
12244 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012245 }
12246 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012247 } else {
12248 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12249 LOGE("Focus forced to infinity %d", focusMode);
12250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12251 rc = BAD_VALUE;
12252 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012253 }
12254
Jason Lee84ae9972017-02-24 13:24:24 -080012255 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12256 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012257 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12259 focalDistance)) {
12260 rc = BAD_VALUE;
12261 }
12262 }
12263
12264 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12265 uint8_t fwk_antibandingMode =
12266 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12267 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12268 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12269 if (NAME_NOT_FOUND != val) {
12270 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012271 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12272 if (m60HzZone) {
12273 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12274 } else {
12275 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12276 }
12277 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012278 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12279 hal_antibandingMode)) {
12280 rc = BAD_VALUE;
12281 }
12282 }
12283 }
12284
12285 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12286 int32_t expCompensation = frame_settings.find(
12287 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12288 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12289 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12290 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12291 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012292 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12294 expCompensation)) {
12295 rc = BAD_VALUE;
12296 }
12297 }
12298
12299 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12300 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12306 rc = setHalFpsRange(frame_settings, hal_metadata);
12307 if (rc != NO_ERROR) {
12308 LOGE("setHalFpsRange failed");
12309 }
12310 }
12311
12312 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12313 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12314 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12315 rc = BAD_VALUE;
12316 }
12317 }
12318
12319 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12320 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12321 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12322 fwk_effectMode);
12323 if (NAME_NOT_FOUND != val) {
12324 uint8_t effectMode = (uint8_t)val;
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12326 rc = BAD_VALUE;
12327 }
12328 }
12329 }
12330
12331 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12332 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12334 colorCorrectMode)) {
12335 rc = BAD_VALUE;
12336 }
12337 }
12338
12339 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12340 cam_color_correct_gains_t colorCorrectGains;
12341 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12342 colorCorrectGains.gains[i] =
12343 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12344 }
12345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12346 colorCorrectGains)) {
12347 rc = BAD_VALUE;
12348 }
12349 }
12350
12351 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12352 cam_color_correct_matrix_t colorCorrectTransform;
12353 cam_rational_type_t transform_elem;
12354 size_t num = 0;
12355 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12356 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12357 transform_elem.numerator =
12358 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12359 transform_elem.denominator =
12360 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12361 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12362 num++;
12363 }
12364 }
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12366 colorCorrectTransform)) {
12367 rc = BAD_VALUE;
12368 }
12369 }
12370
12371 cam_trigger_t aecTrigger;
12372 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12373 aecTrigger.trigger_id = -1;
12374 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12375 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12376 aecTrigger.trigger =
12377 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12378 aecTrigger.trigger_id =
12379 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12380 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12381 aecTrigger)) {
12382 rc = BAD_VALUE;
12383 }
12384 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12385 aecTrigger.trigger, aecTrigger.trigger_id);
12386 }
12387
12388 /*af_trigger must come with a trigger id*/
12389 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12390 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12391 cam_trigger_t af_trigger;
12392 af_trigger.trigger =
12393 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12394 af_trigger.trigger_id =
12395 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12397 rc = BAD_VALUE;
12398 }
12399 LOGD("AfTrigger: %d AfTriggerID: %d",
12400 af_trigger.trigger, af_trigger.trigger_id);
12401 }
12402
12403 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12404 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12406 rc = BAD_VALUE;
12407 }
12408 }
12409 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12410 cam_edge_application_t edge_application;
12411 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012412
Thierry Strudel3d639192016-09-09 11:52:26 -070012413 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12414 edge_application.sharpness = 0;
12415 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012416 edge_application.sharpness =
12417 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12418 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12419 int32_t sharpness =
12420 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12421 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12422 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12423 LOGD("Setting edge mode sharpness %d", sharpness);
12424 edge_application.sharpness = sharpness;
12425 }
12426 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012427 }
12428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12429 rc = BAD_VALUE;
12430 }
12431 }
12432
12433 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12434 int32_t respectFlashMode = 1;
12435 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12436 uint8_t fwk_aeMode =
12437 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012438 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12439 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12440 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012441 respectFlashMode = 0;
12442 LOGH("AE Mode controls flash, ignore android.flash.mode");
12443 }
12444 }
12445 if (respectFlashMode) {
12446 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12447 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12448 LOGH("flash mode after mapping %d", val);
12449 // To check: CAM_INTF_META_FLASH_MODE usage
12450 if (NAME_NOT_FOUND != val) {
12451 uint8_t flashMode = (uint8_t)val;
12452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12453 rc = BAD_VALUE;
12454 }
12455 }
12456 }
12457 }
12458
12459 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12460 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12462 rc = BAD_VALUE;
12463 }
12464 }
12465
12466 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12467 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12469 flashFiringTime)) {
12470 rc = BAD_VALUE;
12471 }
12472 }
12473
12474 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12475 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12476 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12477 hotPixelMode)) {
12478 rc = BAD_VALUE;
12479 }
12480 }
12481
12482 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12483 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12485 lensAperture)) {
12486 rc = BAD_VALUE;
12487 }
12488 }
12489
12490 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12491 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12493 filterDensity)) {
12494 rc = BAD_VALUE;
12495 }
12496 }
12497
12498 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12499 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12501 focalLength)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505
12506 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12507 uint8_t optStabMode =
12508 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12510 optStabMode)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514
12515 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12516 uint8_t videoStabMode =
12517 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12518 LOGD("videoStabMode from APP = %d", videoStabMode);
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12520 videoStabMode)) {
12521 rc = BAD_VALUE;
12522 }
12523 }
12524
12525
12526 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12527 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12528 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12529 noiseRedMode)) {
12530 rc = BAD_VALUE;
12531 }
12532 }
12533
12534 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12535 float reprocessEffectiveExposureFactor =
12536 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12538 reprocessEffectiveExposureFactor)) {
12539 rc = BAD_VALUE;
12540 }
12541 }
12542
12543 cam_crop_region_t scalerCropRegion;
12544 bool scalerCropSet = false;
12545 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12546 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12547 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12548 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12549 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12550
12551 // Map coordinate system from active array to sensor output.
12552 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12553 scalerCropRegion.width, scalerCropRegion.height);
12554
12555 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12556 scalerCropRegion)) {
12557 rc = BAD_VALUE;
12558 }
12559 scalerCropSet = true;
12560 }
12561
12562 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12563 int64_t sensorExpTime =
12564 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12565 LOGD("setting sensorExpTime %lld", sensorExpTime);
12566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12567 sensorExpTime)) {
12568 rc = BAD_VALUE;
12569 }
12570 }
12571
12572 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12573 int64_t sensorFrameDuration =
12574 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012575 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12576 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12577 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12578 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12579 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12580 sensorFrameDuration)) {
12581 rc = BAD_VALUE;
12582 }
12583 }
12584
12585 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12586 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12587 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12588 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12589 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12590 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12591 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12593 sensorSensitivity)) {
12594 rc = BAD_VALUE;
12595 }
12596 }
12597
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012598#ifndef USE_HAL_3_3
12599 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12600 int32_t ispSensitivity =
12601 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12602 if (ispSensitivity <
12603 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12604 ispSensitivity =
12605 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12606 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12607 }
12608 if (ispSensitivity >
12609 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12610 ispSensitivity =
12611 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12612 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12613 }
12614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12615 ispSensitivity)) {
12616 rc = BAD_VALUE;
12617 }
12618 }
12619#endif
12620
Thierry Strudel3d639192016-09-09 11:52:26 -070012621 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12622 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12624 rc = BAD_VALUE;
12625 }
12626 }
12627
12628 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12629 uint8_t fwk_facedetectMode =
12630 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12631
12632 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12633 fwk_facedetectMode);
12634
12635 if (NAME_NOT_FOUND != val) {
12636 uint8_t facedetectMode = (uint8_t)val;
12637 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12638 facedetectMode)) {
12639 rc = BAD_VALUE;
12640 }
12641 }
12642 }
12643
Thierry Strudel54dc9782017-02-15 12:12:10 -080012644 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012645 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012646 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012647 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12648 histogramMode)) {
12649 rc = BAD_VALUE;
12650 }
12651 }
12652
12653 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12654 uint8_t sharpnessMapMode =
12655 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12657 sharpnessMapMode)) {
12658 rc = BAD_VALUE;
12659 }
12660 }
12661
12662 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12663 uint8_t tonemapMode =
12664 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12665 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12666 rc = BAD_VALUE;
12667 }
12668 }
12669 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12670 /*All tonemap channels will have the same number of points*/
12671 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12672 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12673 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12674 cam_rgb_tonemap_curves tonemapCurves;
12675 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12676 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12677 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12678 tonemapCurves.tonemap_points_cnt,
12679 CAM_MAX_TONEMAP_CURVE_SIZE);
12680 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12681 }
12682
12683 /* ch0 = G*/
12684 size_t point = 0;
12685 cam_tonemap_curve_t tonemapCurveGreen;
12686 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12687 for (size_t j = 0; j < 2; j++) {
12688 tonemapCurveGreen.tonemap_points[i][j] =
12689 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12690 point++;
12691 }
12692 }
12693 tonemapCurves.curves[0] = tonemapCurveGreen;
12694
12695 /* ch 1 = B */
12696 point = 0;
12697 cam_tonemap_curve_t tonemapCurveBlue;
12698 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12699 for (size_t j = 0; j < 2; j++) {
12700 tonemapCurveBlue.tonemap_points[i][j] =
12701 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12702 point++;
12703 }
12704 }
12705 tonemapCurves.curves[1] = tonemapCurveBlue;
12706
12707 /* ch 2 = R */
12708 point = 0;
12709 cam_tonemap_curve_t tonemapCurveRed;
12710 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12711 for (size_t j = 0; j < 2; j++) {
12712 tonemapCurveRed.tonemap_points[i][j] =
12713 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12714 point++;
12715 }
12716 }
12717 tonemapCurves.curves[2] = tonemapCurveRed;
12718
12719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12720 tonemapCurves)) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
12725 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12726 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12728 captureIntent)) {
12729 rc = BAD_VALUE;
12730 }
12731 }
12732
12733 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12734 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12736 blackLevelLock)) {
12737 rc = BAD_VALUE;
12738 }
12739 }
12740
12741 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12742 uint8_t lensShadingMapMode =
12743 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12744 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12745 lensShadingMapMode)) {
12746 rc = BAD_VALUE;
12747 }
12748 }
12749
12750 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12751 cam_area_t roi;
12752 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012753 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012754
12755 // Map coordinate system from active array to sensor output.
12756 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12757 roi.rect.height);
12758
12759 if (scalerCropSet) {
12760 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12761 }
12762 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766
12767 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12768 cam_area_t roi;
12769 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012770 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012771
12772 // Map coordinate system from active array to sensor output.
12773 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12774 roi.rect.height);
12775
12776 if (scalerCropSet) {
12777 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12778 }
12779 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12780 rc = BAD_VALUE;
12781 }
12782 }
12783
12784 // CDS for non-HFR non-video mode
12785 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12786 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12787 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12788 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12789 LOGE("Invalid CDS mode %d!", *fwk_cds);
12790 } else {
12791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12792 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
12796 }
12797
Thierry Strudel04e026f2016-10-10 11:27:36 -070012798 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012799 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012800 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012801 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12802 }
12803 if (m_bVideoHdrEnabled)
12804 vhdr = CAM_VIDEO_HDR_MODE_ON;
12805
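    // Detect a transition in the staggered video HDR state so the change can be logged for profiling.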
Thierry Strudel54dc9782017-02-15 12:12:10 -080012806 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12807
12808 if(vhdr != curr_hdr_state)
12809 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12810
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012811 rc = setVideoHdrMode(mParameters, vhdr);
12812 if (rc != NO_ERROR) {
12813         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012814 }
12815
12816 //IR
12817 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12818 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12819 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012820 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12821 uint8_t isIRon = 0;
12822
12823         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012824 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12825 LOGE("Invalid IR mode %d!", fwk_ir);
12826 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012827 if(isIRon != curr_ir_state )
12828 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12829
Thierry Strudel04e026f2016-10-10 11:27:36 -070012830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12831 CAM_INTF_META_IR_MODE, fwk_ir)) {
12832 rc = BAD_VALUE;
12833 }
12834 }
12835 }
12836
Thierry Strudel54dc9782017-02-15 12:12:10 -080012837 //Binning Correction Mode
12838 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12839 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12840 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12841 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12842 || (0 > fwk_binning_correction)) {
12843 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12844 } else {
12845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12846 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12847 rc = BAD_VALUE;
12848 }
12849 }
12850 }
12851
Thierry Strudel269c81a2016-10-12 12:13:59 -070012852 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12853 float aec_speed;
12854 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12855 LOGD("AEC Speed :%f", aec_speed);
12856 if ( aec_speed < 0 ) {
12857 LOGE("Invalid AEC mode %f!", aec_speed);
12858 } else {
12859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12860 aec_speed)) {
12861 rc = BAD_VALUE;
12862 }
12863 }
12864 }
12865
12866 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12867 float awb_speed;
12868 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12869 LOGD("AWB Speed :%f", awb_speed);
12870 if ( awb_speed < 0 ) {
12871 LOGE("Invalid AWB mode %f!", awb_speed);
12872 } else {
12873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12874 awb_speed)) {
12875 rc = BAD_VALUE;
12876 }
12877 }
12878 }
12879
Thierry Strudel3d639192016-09-09 11:52:26 -070012880 // TNR
12881 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12882 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12883 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012884 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012885 cam_denoise_param_t tnr;
12886 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12887 tnr.process_plates =
12888 (cam_denoise_process_type_t)frame_settings.find(
12889 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12890 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012891
12892 if(b_TnrRequested != curr_tnr_state)
12893 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12894
Thierry Strudel3d639192016-09-09 11:52:26 -070012895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12896 rc = BAD_VALUE;
12897 }
12898 }
12899
Thierry Strudel54dc9782017-02-15 12:12:10 -080012900 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012901 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012902 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12904 *exposure_metering_mode)) {
12905 rc = BAD_VALUE;
12906 }
12907 }
12908
Thierry Strudel3d639192016-09-09 11:52:26 -070012909 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12910 int32_t fwk_testPatternMode =
12911 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12912 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12913 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12914
12915 if (NAME_NOT_FOUND != testPatternMode) {
12916 cam_test_pattern_data_t testPatternData;
12917 memset(&testPatternData, 0, sizeof(testPatternData));
12918 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12919 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12920 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12921 int32_t *fwk_testPatternData =
12922 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12923 testPatternData.r = fwk_testPatternData[0];
12924 testPatternData.b = fwk_testPatternData[3];
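            // Map the two framework green values to Gr/Gb based on the sensor's color filter arrangement.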
12925 switch (gCamCapability[mCameraId]->color_arrangement) {
12926 case CAM_FILTER_ARRANGEMENT_RGGB:
12927 case CAM_FILTER_ARRANGEMENT_GRBG:
12928 testPatternData.gr = fwk_testPatternData[1];
12929 testPatternData.gb = fwk_testPatternData[2];
12930 break;
12931 case CAM_FILTER_ARRANGEMENT_GBRG:
12932 case CAM_FILTER_ARRANGEMENT_BGGR:
12933 testPatternData.gr = fwk_testPatternData[2];
12934 testPatternData.gb = fwk_testPatternData[1];
12935 break;
12936 default:
12937 LOGE("color arrangement %d is not supported",
12938 gCamCapability[mCameraId]->color_arrangement);
12939 break;
12940 }
12941 }
12942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12943 testPatternData)) {
12944 rc = BAD_VALUE;
12945 }
12946 } else {
12947 LOGE("Invalid framework sensor test pattern mode %d",
12948 fwk_testPatternMode);
12949 }
12950 }
12951
12952 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12953 size_t count = 0;
12954 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12955 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12956 gps_coords.data.d, gps_coords.count, count);
12957 if (gps_coords.count != count) {
12958 rc = BAD_VALUE;
12959 }
12960 }
12961
12962 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12963 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12964 size_t count = 0;
12965 const char *gps_methods_src = (const char *)
12966 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
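        // Copy the framework string into a fixed-size, NUL-padded buffer expected by the backend.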
12967 memset(gps_methods, '\0', sizeof(gps_methods));
12968 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12969 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12970 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12971 if (GPS_PROCESSING_METHOD_SIZE != count) {
12972 rc = BAD_VALUE;
12973 }
12974 }
12975
12976 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12977 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12979 gps_timestamp)) {
12980 rc = BAD_VALUE;
12981 }
12982 }
12983
12984 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12985 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12986 cam_rotation_info_t rotation_info;
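        // Map the EXIF orientation in degrees to the HAL rotation enum and tag it with the
        // snapshot stream id so the rotation is applied on the JPEG path.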
12987 if (orientation == 0) {
12988 rotation_info.rotation = ROTATE_0;
12989 } else if (orientation == 90) {
12990 rotation_info.rotation = ROTATE_90;
12991 } else if (orientation == 180) {
12992 rotation_info.rotation = ROTATE_180;
12993 } else if (orientation == 270) {
12994 rotation_info.rotation = ROTATE_270;
12995 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012996 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012997 rotation_info.streamId = snapshotStreamId;
12998 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13000 rc = BAD_VALUE;
13001 }
13002 }
13003
13004 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13005 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13007 rc = BAD_VALUE;
13008 }
13009 }
13010
13011 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13012 uint32_t thumb_quality = (uint32_t)
13013 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13015 thumb_quality)) {
13016 rc = BAD_VALUE;
13017 }
13018 }
13019
13020 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13021 cam_dimension_t dim;
13022 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13023 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13025 rc = BAD_VALUE;
13026 }
13027 }
13028
13029 // Internal metadata
13030 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13031 size_t count = 0;
13032 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13033 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13034 privatedata.data.i32, privatedata.count, count);
13035 if (privatedata.count != count) {
13036 rc = BAD_VALUE;
13037 }
13038 }
13039
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013040 // ISO/Exposure Priority
13041 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13042 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13043 cam_priority_mode_t mode =
13044 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
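        // The 64-bit priority value carries either an ISO setting or an exposure time,
        // depending on the selected mode; manual priority also enables ZSL below.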
13045 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13046 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13047 use_iso_exp_pty.previewOnly = FALSE;
13048 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13049 use_iso_exp_pty.value = *ptr;
13050
13051 if(CAM_ISO_PRIORITY == mode) {
13052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13053 use_iso_exp_pty)) {
13054 rc = BAD_VALUE;
13055 }
13056 }
13057 else {
13058 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13059 use_iso_exp_pty)) {
13060 rc = BAD_VALUE;
13061 }
13062 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013063
13064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13065 rc = BAD_VALUE;
13066 }
13067 }
13068 } else {
13069 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13070 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013071 }
13072 }
13073
13074 // Saturation
13075 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13076 int32_t* use_saturation =
13077 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13079 rc = BAD_VALUE;
13080 }
13081 }
13082
Thierry Strudel3d639192016-09-09 11:52:26 -070013083 // EV step
13084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13085 gCamCapability[mCameraId]->exp_compensation_step)) {
13086 rc = BAD_VALUE;
13087 }
13088
13089 // CDS info
13090 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13091 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13092 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13093
13094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13095 CAM_INTF_META_CDS_DATA, *cdsData)) {
13096 rc = BAD_VALUE;
13097 }
13098 }
13099
Shuzhen Wang19463d72016-03-08 11:09:52 -080013100 // Hybrid AE
13101 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13102 uint8_t *hybrid_ae = (uint8_t *)
13103 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13105 rc = BAD_VALUE;
13106 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013107 }
13108
Shuzhen Wang14415f52016-11-16 18:26:18 -080013109 // Histogram
13110 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13111 uint8_t histogramMode =
13112 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13114 histogramMode)) {
13115 rc = BAD_VALUE;
13116 }
13117 }
13118
13119 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13120 int32_t histogramBins =
13121 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13123 histogramBins)) {
13124 rc = BAD_VALUE;
13125 }
13126 }
13127
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013128 // Tracking AF
13129 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13130 uint8_t trackingAfTrigger =
13131 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13133 trackingAfTrigger)) {
13134 rc = BAD_VALUE;
13135 }
13136 }
13137
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013138 // Makernote
13139 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13140 if (entry.count != 0) {
13141 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13142 cam_makernote_t makernote;
13143 makernote.length = entry.count;
13144 memcpy(makernote.data, entry.data.u8, makernote.length);
13145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13146 rc = BAD_VALUE;
13147 }
13148 } else {
13149 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13150 MAX_MAKERNOTE_LENGTH);
13151 rc = BAD_VALUE;
13152 }
13153 }
13154
Thierry Strudel3d639192016-09-09 11:52:26 -070013155 return rc;
13156}
13157
13158/*===========================================================================
13159 * FUNCTION : captureResultCb
13160 *
13161 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13162 *
13163 * PARAMETERS :
13164 * @frame : frame information from mm-camera-interface
13165 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13166 * @userdata: userdata
13167 *
13168 * RETURN : NONE
13169 *==========================================================================*/
13170void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13171 camera3_stream_buffer_t *buffer,
13172 uint32_t frame_number, bool isInputBuffer, void *userdata)
13173{
13174 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13175 if (hw == NULL) {
13176 LOGE("Invalid hw %p", hw);
13177 return;
13178 }
13179
13180 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13181 return;
13182}
13183
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013184/*===========================================================================
13185 * FUNCTION : setBufferErrorStatus
13186 *
13187 * DESCRIPTION: Callback handler for channels to report any buffer errors
13188 *
13189 * PARAMETERS :
13190 * @ch : Channel on which buffer error is reported from
13191 * @frame_number : frame number on which buffer error is reported on
13192 * @buffer_status : buffer error status
13193 * @userdata: userdata
13194 *
13195 * RETURN : NONE
13196 *==========================================================================*/
13197void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13198 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13199{
13200 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13201 if (hw == NULL) {
13202 LOGE("Invalid hw %p", hw);
13203 return;
13204 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013205
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013206 hw->setBufferErrorStatus(ch, frame_number, err);
13207 return;
13208}
13209
13210void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13211 uint32_t frameNumber, camera3_buffer_status_t err)
13212{
13213 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13214 pthread_mutex_lock(&mMutex);
13215
13216 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13217 if (req.frame_number != frameNumber)
13218 continue;
13219 for (auto& k : req.mPendingBufferList) {
13220 if(k.stream->priv == ch) {
13221 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13222 }
13223 }
13224 }
13225
13226 pthread_mutex_unlock(&mMutex);
13227 return;
13228}
Thierry Strudel3d639192016-09-09 11:52:26 -070013229/*===========================================================================
13230 * FUNCTION : initialize
13231 *
13232 * DESCRIPTION: Pass framework callback pointers to HAL
13233 *
13234 * PARAMETERS :
13235 *   @device       : camera3 device structure
13236 *   @callback_ops : callback function pointers from the framework
13237 * RETURN : Success : 0
13238 * Failure: -ENODEV
13239 *==========================================================================*/
13240
13241int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13242 const camera3_callback_ops_t *callback_ops)
13243{
13244 LOGD("E");
13245 QCamera3HardwareInterface *hw =
13246 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13247 if (!hw) {
13248 LOGE("NULL camera device");
13249 return -ENODEV;
13250 }
13251
13252 int rc = hw->initialize(callback_ops);
13253 LOGD("X");
13254 return rc;
13255}
13256
13257/*===========================================================================
13258 * FUNCTION : configure_streams
13259 *
13260 * DESCRIPTION: Set up the streams requested by the framework
13261 *
13262 * PARAMETERS :
13263 *   @device      : camera3 device structure
13264 *   @stream_list : streams to be configured
13265 * RETURN : Success: 0
13266 * Failure: -EINVAL (if stream configuration is invalid)
13267 * -ENODEV (fatal error)
13268 *==========================================================================*/
13269
13270int QCamera3HardwareInterface::configure_streams(
13271 const struct camera3_device *device,
13272 camera3_stream_configuration_t *stream_list)
13273{
13274 LOGD("E");
13275 QCamera3HardwareInterface *hw =
13276 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13277 if (!hw) {
13278 LOGE("NULL camera device");
13279 return -ENODEV;
13280 }
13281 int rc = hw->configureStreams(stream_list);
13282 LOGD("X");
13283 return rc;
13284}
13285
13286/*===========================================================================
13287 * FUNCTION : construct_default_request_settings
13288 *
13289 * DESCRIPTION: Configure a settings buffer to meet the required use case
13290 *
13291 * PARAMETERS :
13292 *   @device : camera3 device structure
13293 *   @type   : request template type (preview, still capture, video, etc.)
13294 * RETURN : Success: Return valid metadata
13295 * Failure: Return NULL
13296 *==========================================================================*/
13297const camera_metadata_t* QCamera3HardwareInterface::
13298 construct_default_request_settings(const struct camera3_device *device,
13299 int type)
13300{
13301
13302 LOGD("E");
13303 camera_metadata_t* fwk_metadata = NULL;
13304 QCamera3HardwareInterface *hw =
13305 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13306 if (!hw) {
13307 LOGE("NULL camera device");
13308 return NULL;
13309 }
13310
13311 fwk_metadata = hw->translateCapabilityToMetadata(type);
13312
13313 LOGD("X");
13314 return fwk_metadata;
13315}
13316
13317/*===========================================================================
13318 * FUNCTION : process_capture_request
13319 *
13320 * DESCRIPTION: Submit a new capture request to the HAL for processing
13321 *
13322 * PARAMETERS :
13323 *   @device  : camera3 device structure
13324 *   @request : capture request to be processed
13325 * RETURN     : 0 on success; negative error code on failure
13326 *==========================================================================*/
13327int QCamera3HardwareInterface::process_capture_request(
13328 const struct camera3_device *device,
13329 camera3_capture_request_t *request)
13330{
13331 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013332 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013333 QCamera3HardwareInterface *hw =
13334 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13335 if (!hw) {
13336 LOGE("NULL camera device");
13337 return -EINVAL;
13338 }
13339
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013340 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013341 LOGD("X");
13342 return rc;
13343}
13344
13345/*===========================================================================
13346 * FUNCTION : dump
13347 *
13348 * DESCRIPTION: Dump HAL debug information to the given file descriptor
13349 *
13350 * PARAMETERS :
13351 *   @device : camera3 device structure
13352 *   @fd     : file descriptor to dump into
13353 * RETURN   : None
13354 *==========================================================================*/
13355
13356void QCamera3HardwareInterface::dump(
13357 const struct camera3_device *device, int fd)
13358{
13359 /* Log level property is read when "adb shell dumpsys media.camera" is
13360 called so that the log level can be controlled without restarting
13361 the media server */
13362 getLogLevel();
13363
13364 LOGD("E");
13365 QCamera3HardwareInterface *hw =
13366 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13367 if (!hw) {
13368 LOGE("NULL camera device");
13369 return;
13370 }
13371
13372 hw->dump(fd);
13373 LOGD("X");
13374 return;
13375}
13376
13377/*===========================================================================
13378 * FUNCTION : flush
13379 *
13380 * DESCRIPTION: Flush all in-flight requests and return the device to an idle state
13381 *
13382 * PARAMETERS :
13383 *   @device : camera3 device structure
13384 *
13385 * RETURN   : 0 on success; negative error code on failure
13386 *==========================================================================*/
13387
13388int QCamera3HardwareInterface::flush(
13389 const struct camera3_device *device)
13390{
13391 int rc;
13392 LOGD("E");
13393 QCamera3HardwareInterface *hw =
13394 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13395 if (!hw) {
13396 LOGE("NULL camera device");
13397 return -EINVAL;
13398 }
13399
13400 pthread_mutex_lock(&hw->mMutex);
13401 // Validate current state
13402 switch (hw->mState) {
13403 case STARTED:
13404 /* valid state */
13405 break;
13406
13407 case ERROR:
13408 pthread_mutex_unlock(&hw->mMutex);
13409 hw->handleCameraDeviceError();
13410 return -ENODEV;
13411
13412 default:
13413 LOGI("Flush returned during state %d", hw->mState);
13414 pthread_mutex_unlock(&hw->mMutex);
13415 return 0;
13416 }
13417 pthread_mutex_unlock(&hw->mMutex);
13418
13419 rc = hw->flush(true /* restart channels */ );
13420 LOGD("X");
13421 return rc;
13422}
13423
13424/*===========================================================================
13425 * FUNCTION : close_camera_device
13426 *
13427 * DESCRIPTION: Close the camera device and release its resources
13428 *
13429 * PARAMETERS :
13430 *   @device : hw_device_t handle of the camera to be closed
13431 *
13432 * RETURN   : NO_ERROR on success; BAD_VALUE for an invalid device
13433 *==========================================================================*/
13434int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13435{
13436 int ret = NO_ERROR;
13437 QCamera3HardwareInterface *hw =
13438 reinterpret_cast<QCamera3HardwareInterface *>(
13439 reinterpret_cast<camera3_device_t *>(device)->priv);
13440 if (!hw) {
13441 LOGE("NULL camera device");
13442 return BAD_VALUE;
13443 }
13444
13445 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13446 delete hw;
13447 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013448 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013449 return ret;
13450}
13451
13452/*===========================================================================
13453 * FUNCTION : getWaveletDenoiseProcessPlate
13454 *
13455 * DESCRIPTION: query wavelet denoise process plate
13456 *
13457 * PARAMETERS : None
13458 *
13459 * RETURN : WNR process plate value
13460 *==========================================================================*/
13461cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13462{
13463 char prop[PROPERTY_VALUE_MAX];
13464 memset(prop, 0, sizeof(prop));
13465 property_get("persist.denoise.process.plates", prop, "0");
13466 int processPlate = atoi(prop);
13467 switch(processPlate) {
13468 case 0:
13469 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13470 case 1:
13471 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13472 case 2:
13473 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13474 case 3:
13475 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13476 default:
13477 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13478 }
13479}
13480
13481
13482/*===========================================================================
13483 * FUNCTION : getTemporalDenoiseProcessPlate
13484 *
13485 * DESCRIPTION: query temporal denoise process plate
13486 *
13487 * PARAMETERS : None
13488 *
13489 * RETURN : TNR process plate value
13490 *==========================================================================*/
13491cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13492{
13493 char prop[PROPERTY_VALUE_MAX];
13494 memset(prop, 0, sizeof(prop));
13495 property_get("persist.tnr.process.plates", prop, "0");
13496 int processPlate = atoi(prop);
13497 switch(processPlate) {
13498 case 0:
13499 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13500 case 1:
13501 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13502 case 2:
13503 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13504 case 3:
13505 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13506 default:
13507 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13508 }
13509}
13510
13511
13512/*===========================================================================
13513 * FUNCTION : extractSceneMode
13514 *
13515 * DESCRIPTION: Extract scene mode from frameworks set metadata
13516 *
13517 * PARAMETERS :
13518 * @frame_settings: CameraMetadata reference
13519 * @metaMode: ANDROID_CONTROL_MODE value
13520 * @hal_metadata: hal metadata structure
13521 *
13522 * RETURN : NO_ERROR on success, error code otherwise
13523 *==========================================================================*/
13524int32_t QCamera3HardwareInterface::extractSceneMode(
13525 const CameraMetadata &frame_settings, uint8_t metaMode,
13526 metadata_buffer_t *hal_metadata)
13527{
13528 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013529 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13530
13531 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13532 LOGD("Ignoring control mode OFF_KEEP_STATE");
13533 return NO_ERROR;
13534 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013535
13536 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13537 camera_metadata_ro_entry entry =
13538 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13539 if (0 == entry.count)
13540 return rc;
13541
13542 uint8_t fwk_sceneMode = entry.data.u8[0];
13543
13544 int val = lookupHalName(SCENE_MODES_MAP,
13545 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13546 fwk_sceneMode);
13547 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013548 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013549 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013550 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013551 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013552
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013553 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13554 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13555 }
13556
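    // When sensor HDR is not active, program the bestshot (scene) mode and, for the HDR scene,
    // fall back to multi-frame bracketed HDR handled in postprocessing.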
13557 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13558 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013559 cam_hdr_param_t hdr_params;
13560 hdr_params.hdr_enable = 1;
13561 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13562 hdr_params.hdr_need_1x = false;
13563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13564 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13565 rc = BAD_VALUE;
13566 }
13567 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013568
Thierry Strudel3d639192016-09-09 11:52:26 -070013569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13570 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13571 rc = BAD_VALUE;
13572 }
13573 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013574
13575 if (mForceHdrSnapshot) {
13576 cam_hdr_param_t hdr_params;
13577 hdr_params.hdr_enable = 1;
13578 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13579 hdr_params.hdr_need_1x = false;
13580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13581 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13582 rc = BAD_VALUE;
13583 }
13584 }
13585
Thierry Strudel3d639192016-09-09 11:52:26 -070013586 return rc;
13587}
13588
13589/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013590 * FUNCTION : setVideoHdrMode
13591 *
13592 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13593 *
13594 * PARAMETERS :
13595 * @hal_metadata: hal metadata structure
13596 * @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
13597 *
13598 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13599 *==========================================================================*/
13600int32_t QCamera3HardwareInterface::setVideoHdrMode(
13601 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13602{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013603 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13604 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13605 }
13606
13607 LOGE("Invalid Video HDR mode %d!", vhdr);
13608 return BAD_VALUE;
13609}
13610
13611/*===========================================================================
13612 * FUNCTION : setSensorHDR
13613 *
13614 * DESCRIPTION: Enable/disable sensor HDR.
13615 *
13616 * PARAMETERS :
13617 * @hal_metadata: hal metadata structure
13618 * @enable: boolean whether to enable/disable sensor HDR
13619 *
13620 * RETURN : NO_ERROR on success, error code otherwise
13621 *==========================================================================*/
13622int32_t QCamera3HardwareInterface::setSensorHDR(
13623 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13624{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013625 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013626 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13627
13628 if (enable) {
13629 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13630 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13631 #ifdef _LE_CAMERA_
13632 //Default to staggered HDR for IOT
13633 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13634 #else
13635 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13636 #endif
13637 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13638 }
13639
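    // Apply the requested sensor HDR type only if the corresponding feature bit is advertised
    // in the camera capabilities.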
13640 bool isSupported = false;
13641 switch (sensor_hdr) {
13642 case CAM_SENSOR_HDR_IN_SENSOR:
13643 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13644 CAM_QCOM_FEATURE_SENSOR_HDR) {
13645 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013646 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013647 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013648 break;
13649 case CAM_SENSOR_HDR_ZIGZAG:
13650 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13651 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13652 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013653 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013654 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013655 break;
13656 case CAM_SENSOR_HDR_STAGGERED:
13657 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13658 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13659 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013660 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013661 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013662 break;
13663 case CAM_SENSOR_HDR_OFF:
13664 isSupported = true;
13665 LOGD("Turning off sensor HDR");
13666 break;
13667 default:
13668 LOGE("HDR mode %d not supported", sensor_hdr);
13669 rc = BAD_VALUE;
13670 break;
13671 }
13672
13673 if(isSupported) {
13674 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13675 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13676 rc = BAD_VALUE;
13677 } else {
13678 if(!isVideoHdrEnable)
13679 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013680 }
13681 }
13682 return rc;
13683}
13684
13685/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013686 * FUNCTION : needRotationReprocess
13687 *
13688 * DESCRIPTION: if rotation needs to be done by the reprocess path in post-processing
13689 *
13690 * PARAMETERS : none
13691 *
13692 * RETURN : true: needed
13693 * false: no need
13694 *==========================================================================*/
13695bool QCamera3HardwareInterface::needRotationReprocess()
13696{
13697 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13698         // pp has the capability to process rotation, so use reprocess for it
13699 LOGH("need do reprocess for rotation");
13700 return true;
13701 }
13702
13703 return false;
13704}
13705
13706/*===========================================================================
13707 * FUNCTION : needReprocess
13708 *
13709 * DESCRIPTION: if reprocess is needed
13710 *
13711 * PARAMETERS : none
13712 *
13713 * RETURN : true: needed
13714 * false: no need
13715 *==========================================================================*/
13716bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13717{
13718 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13719 // TODO: add for ZSL HDR later
13720 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13721 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13722 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13723 return true;
13724 } else {
13725 LOGH("already post processed frame");
13726 return false;
13727 }
13728 }
13729 return needRotationReprocess();
13730}
13731
13732/*===========================================================================
13733 * FUNCTION : needJpegExifRotation
13734 *
13735 * DESCRIPTION: if JPEG EXIF rotation is needed
13736 *
13737 * PARAMETERS : none
13738 *
13739 * RETURN : true: needed
13740 * false: no need
13741 *==========================================================================*/
13742bool QCamera3HardwareInterface::needJpegExifRotation()
13743{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013744 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013745 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13746 LOGD("Need use Jpeg EXIF Rotation");
13747 return true;
13748 }
13749 return false;
13750}
13751
13752/*===========================================================================
13753 * FUNCTION : addOfflineReprocChannel
13754 *
13755 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13756 * coming from input channel
13757 *
13758 * PARAMETERS :
13759 * @config : reprocess configuration
13760 * @inputChHandle : pointer to the input (source) channel
13761 *
13762 *
13763 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13764 *==========================================================================*/
13765QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13766 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13767{
13768 int32_t rc = NO_ERROR;
13769 QCamera3ReprocessChannel *pChannel = NULL;
13770
13771 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013772 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13773 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013774 if (NULL == pChannel) {
13775 LOGE("no mem for reprocess channel");
13776 return NULL;
13777 }
13778
13779 rc = pChannel->initialize(IS_TYPE_NONE);
13780 if (rc != NO_ERROR) {
13781 LOGE("init reprocess channel failed, ret = %d", rc);
13782 delete pChannel;
13783 return NULL;
13784 }
13785
13786 // pp feature config
13787 cam_pp_feature_config_t pp_config;
13788 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13789
13790 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13791 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13792 & CAM_QCOM_FEATURE_DSDN) {
13793         //Use CPP CDS in case h/w supports it.
13794 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13795 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13796 }
13797 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13798 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13799 }
13800
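    // Carry HDR bracketing parameters into the reprocess feature config when HDR capture is
    // requested or forced (mForceHdrSnapshot).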
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013801 if (config.hdr_param.hdr_enable) {
13802 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13803 pp_config.hdr_param = config.hdr_param;
13804 }
13805
13806 if (mForceHdrSnapshot) {
13807 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13808 pp_config.hdr_param.hdr_enable = 1;
13809 pp_config.hdr_param.hdr_need_1x = 0;
13810 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13811 }
13812
Thierry Strudel3d639192016-09-09 11:52:26 -070013813 rc = pChannel->addReprocStreamsFromSource(pp_config,
13814 config,
13815 IS_TYPE_NONE,
13816 mMetadataChannel);
13817
13818 if (rc != NO_ERROR) {
13819 delete pChannel;
13820 return NULL;
13821 }
13822 return pChannel;
13823}
13824
13825/*===========================================================================
13826 * FUNCTION : getMobicatMask
13827 *
13828 * DESCRIPTION: returns mobicat mask
13829 *
13830 * PARAMETERS : none
13831 *
13832 * RETURN : mobicat mask
13833 *
13834 *==========================================================================*/
13835uint8_t QCamera3HardwareInterface::getMobicatMask()
13836{
13837 return m_MobicatMask;
13838}
13839
13840/*===========================================================================
13841 * FUNCTION : setMobicat
13842 *
13843 * DESCRIPTION: set Mobicat on/off.
13844 *
13845 * PARAMETERS :
13846 * @params : none
13847 *
13848 * RETURN : int32_t type of status
13849 * NO_ERROR -- success
13850 * none-zero failure code
13851 *==========================================================================*/
13852int32_t QCamera3HardwareInterface::setMobicat()
13853{
Thierry Strudel3d639192016-09-09 11:52:26 -070013854 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013855
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013856 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013857 tune_cmd_t tune_cmd;
13858 tune_cmd.type = SET_RELOAD_CHROMATIX;
13859 tune_cmd.module = MODULE_ALL;
13860 tune_cmd.value = TRUE;
13861 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13862 CAM_INTF_PARM_SET_VFE_COMMAND,
13863 tune_cmd);
13864
13865 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13866 CAM_INTF_PARM_SET_PP_COMMAND,
13867 tune_cmd);
13868 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013869
13870 return ret;
13871}
13872
13873/*===========================================================================
13874* FUNCTION : getLogLevel
13875*
13876* DESCRIPTION: Reads the log level property into a variable
13877*
13878* PARAMETERS :
13879* None
13880*
13881* RETURN :
13882* None
13883*==========================================================================*/
13884void QCamera3HardwareInterface::getLogLevel()
13885{
13886 char prop[PROPERTY_VALUE_MAX];
13887 uint32_t globalLogLevel = 0;
13888
13889 property_get("persist.camera.hal.debug", prop, "0");
13890 int val = atoi(prop);
13891 if (0 <= val) {
13892 gCamHal3LogLevel = (uint32_t)val;
13893 }
13894
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013895 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013896 gKpiDebugLevel = atoi(prop);
13897
13898 property_get("persist.camera.global.debug", prop, "0");
13899 val = atoi(prop);
13900 if (0 <= val) {
13901 globalLogLevel = (uint32_t)val;
13902 }
13903
13904 /* Highest log level among hal.logs and global.logs is selected */
13905 if (gCamHal3LogLevel < globalLogLevel)
13906 gCamHal3LogLevel = globalLogLevel;
13907
13908 return;
13909}
13910
13911/*===========================================================================
13912 * FUNCTION : validateStreamRotations
13913 *
13914 * DESCRIPTION: Check if the rotations requested are supported
13915 *
13916 * PARAMETERS :
13917 * @stream_list : streams to be configured
13918 *
13919 * RETURN : NO_ERROR on success
13920 * -EINVAL on failure
13921 *
13922 *==========================================================================*/
13923int QCamera3HardwareInterface::validateStreamRotations(
13924 camera3_stream_configuration_t *streamList)
13925{
13926 int rc = NO_ERROR;
13927
13928 /*
13929 * Loop through all streams requested in configuration
13930 * Check if unsupported rotations have been requested on any of them
13931 */
13932 for (size_t j = 0; j < streamList->num_streams; j++){
13933 camera3_stream_t *newStream = streamList->streams[j];
13934
Emilian Peev35ceeed2017-06-29 11:58:56 -070013935 switch(newStream->rotation) {
13936 case CAMERA3_STREAM_ROTATION_0:
13937 case CAMERA3_STREAM_ROTATION_90:
13938 case CAMERA3_STREAM_ROTATION_180:
13939 case CAMERA3_STREAM_ROTATION_270:
13940 //Expected values
13941 break;
13942 default:
13943 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13944 "type:%d and stream format:%d", __func__,
13945 newStream->rotation, newStream->stream_type,
13946 newStream->format);
13947 return -EINVAL;
13948 }
13949
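        // Non-zero rotation is only supported on implementation-defined output streams;
        // ZSL (bidirectional) streams must use ROTATION_0.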
Thierry Strudel3d639192016-09-09 11:52:26 -070013950 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13951 bool isImplDef = (newStream->format ==
13952 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13953 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13954 isImplDef);
13955
13956 if (isRotated && (!isImplDef || isZsl)) {
13957 LOGE("Error: Unsupported rotation of %d requested for stream"
13958 "type:%d and stream format:%d",
13959 newStream->rotation, newStream->stream_type,
13960 newStream->format);
13961 rc = -EINVAL;
13962 break;
13963 }
13964 }
13965
13966 return rc;
13967}
13968
13969/*===========================================================================
13970* FUNCTION : getFlashInfo
13971*
13972* DESCRIPTION: Retrieve information about whether the device has a flash.
13973*
13974* PARAMETERS :
13975* @cameraId : Camera id to query
13976* @hasFlash : Boolean indicating whether there is a flash device
13977* associated with given camera
13978* @flashNode : If a flash device exists, this will be its device node.
13979*
13980* RETURN :
13981* None
13982*==========================================================================*/
13983void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13984 bool& hasFlash,
13985 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13986{
13987 cam_capability_t* camCapability = gCamCapability[cameraId];
13988 if (NULL == camCapability) {
13989 hasFlash = false;
13990 flashNode[0] = '\0';
13991 } else {
13992 hasFlash = camCapability->flash_available;
13993 strlcpy(flashNode,
13994 (char*)camCapability->flash_dev_name,
13995 QCAMERA_MAX_FILEPATH_LENGTH);
13996 }
13997}
13998
13999/*===========================================================================
14000* FUNCTION : getEepromVersionInfo
14001*
14002* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14003*
14004* PARAMETERS : None
14005*
14006* RETURN : string describing EEPROM version
14007* "\0" if no such info available
14008*==========================================================================*/
14009const char *QCamera3HardwareInterface::getEepromVersionInfo()
14010{
14011 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14012}
14013
14014/*===========================================================================
14015* FUNCTION : getLdafCalib
14016*
14017* DESCRIPTION: Retrieve Laser AF calibration data
14018*
14019* PARAMETERS : None
14020*
14021* RETURN : Two uint32_t describing laser AF calibration data
14022* NULL if none is available.
14023*==========================================================================*/
14024const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14025{
14026 if (mLdafCalibExist) {
14027 return &mLdafCalib[0];
14028 } else {
14029 return NULL;
14030 }
14031}
14032
14033/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014034* FUNCTION : getEaselFwVersion
14035*
14036* DESCRIPTION: Retrieve Easel firmware version
14037*
14038* PARAMETERS : None
14039*
14040* RETURN : string describing Firmware version
14041* "\0" if Easel manager client is not open
14042*==========================================================================*/
14043const char *QCamera3HardwareInterface::getEaselFwVersion()
14044{
14045 int rc = NO_ERROR;
14046
14047 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
14048 ALOGD("%s: Querying Easel firmware version", __FUNCTION__);
14049 if (EaselManagerClientOpened) {
14050 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
14051 if (rc != OK)
14052 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
14053 else
14054 return (const char *)&mEaselFwVersion[0];
14055 }
14056 return NULL;
14057}
14058
14059/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014060 * FUNCTION : dynamicUpdateMetaStreamInfo
14061 *
14062 * DESCRIPTION: This function:
14063 * (1) stops all the channels
14064 * (2) returns error on pending requests and buffers
14065 * (3) sends metastream_info in setparams
14066 * (4) starts all channels
14067 * This is useful when sensor has to be restarted to apply any
14068 * settings such as frame rate from a different sensor mode
14069 *
14070 * PARAMETERS : None
14071 *
14072 * RETURN : NO_ERROR on success
14073 * Error codes on failure
14074 *
14075 *==========================================================================*/
14076int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14077{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014078 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014079 int rc = NO_ERROR;
14080
14081 LOGD("E");
14082
14083 rc = stopAllChannels();
14084 if (rc < 0) {
14085 LOGE("stopAllChannels failed");
14086 return rc;
14087 }
14088
14089 rc = notifyErrorForPendingRequests();
14090 if (rc < 0) {
14091 LOGE("notifyErrorForPendingRequests failed");
14092 return rc;
14093 }
14094
14095 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14096 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14097 "Format:%d",
14098 mStreamConfigInfo.type[i],
14099 mStreamConfigInfo.stream_sizes[i].width,
14100 mStreamConfigInfo.stream_sizes[i].height,
14101 mStreamConfigInfo.postprocess_mask[i],
14102 mStreamConfigInfo.format[i]);
14103 }
14104
14105 /* Send meta stream info once again so that ISP can start */
14106 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14107 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14108 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14109 mParameters);
14110 if (rc < 0) {
14111 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14112 }
14113
14114 rc = startAllChannels();
14115 if (rc < 0) {
14116 LOGE("startAllChannels failed");
14117 return rc;
14118 }
14119
14120 LOGD("X");
14121 return rc;
14122}
14123
14124/*===========================================================================
14125 * FUNCTION : stopAllChannels
14126 *
14127 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14128 *
14129 * PARAMETERS : None
14130 *
14131 * RETURN : NO_ERROR on success
14132 * Error codes on failure
14133 *
14134 *==========================================================================*/
14135int32_t QCamera3HardwareInterface::stopAllChannels()
14136{
14137 int32_t rc = NO_ERROR;
14138
14139 LOGD("Stopping all channels");
14140 // Stop the Streams/Channels
14141 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14142 it != mStreamInfo.end(); it++) {
14143 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14144 if (channel) {
14145 channel->stop();
14146 }
14147 (*it)->status = INVALID;
14148 }
14149
14150 if (mSupportChannel) {
14151 mSupportChannel->stop();
14152 }
14153 if (mAnalysisChannel) {
14154 mAnalysisChannel->stop();
14155 }
14156 if (mRawDumpChannel) {
14157 mRawDumpChannel->stop();
14158 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014159 if (mHdrPlusRawSrcChannel) {
14160 mHdrPlusRawSrcChannel->stop();
14161 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014162 if (mMetadataChannel) {
14163 /* If content of mStreamInfo is not 0, there is metadata stream */
14164 mMetadataChannel->stop();
14165 }
14166
14167 LOGD("All channels stopped");
14168 return rc;
14169}
14170
14171/*===========================================================================
14172 * FUNCTION : startAllChannels
14173 *
14174 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14175 *
14176 * PARAMETERS : None
14177 *
14178 * RETURN : NO_ERROR on success
14179 * Error codes on failure
14180 *
14181 *==========================================================================*/
14182int32_t QCamera3HardwareInterface::startAllChannels()
14183{
14184 int32_t rc = NO_ERROR;
14185
14186 LOGD("Start all channels ");
14187 // Start the Streams/Channels
14188 if (mMetadataChannel) {
14189 /* If content of mStreamInfo is not 0, there is metadata stream */
14190 rc = mMetadataChannel->start();
14191 if (rc < 0) {
14192 LOGE("META channel start failed");
14193 return rc;
14194 }
14195 }
14196 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14197 it != mStreamInfo.end(); it++) {
14198 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14199 if (channel) {
14200 rc = channel->start();
14201 if (rc < 0) {
14202 LOGE("channel start failed");
14203 return rc;
14204 }
14205 }
14206 }
14207 if (mAnalysisChannel) {
14208 mAnalysisChannel->start();
14209 }
14210 if (mSupportChannel) {
14211 rc = mSupportChannel->start();
14212 if (rc < 0) {
14213 LOGE("Support channel start failed");
14214 return rc;
14215 }
14216 }
14217 if (mRawDumpChannel) {
14218 rc = mRawDumpChannel->start();
14219 if (rc < 0) {
14220 LOGE("RAW dump channel start failed");
14221 return rc;
14222 }
14223 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014224 if (mHdrPlusRawSrcChannel) {
14225 rc = mHdrPlusRawSrcChannel->start();
14226 if (rc < 0) {
14227 LOGE("HDR+ RAW channel start failed");
14228 return rc;
14229 }
14230 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014231
14232 LOGD("All channels started");
14233 return rc;
14234}
14235
14236/*===========================================================================
14237 * FUNCTION : notifyErrorForPendingRequests
14238 *
14239 * DESCRIPTION: This function sends error for all the pending requests/buffers
14240 *
14241 * PARAMETERS : None
14242 *
14243 * RETURN : Error codes
14244 * NO_ERROR on success
14245 *
14246 *==========================================================================*/
14247int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14248{
Emilian Peev7650c122017-01-19 08:24:33 -080014249 notifyErrorFoPendingDepthData(mDepthChannel);
14250
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014251 auto pendingRequest = mPendingRequestsList.begin();
14252 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014253
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014254 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14255 // buffers (for which buffers aren't sent yet).
14256 while (pendingRequest != mPendingRequestsList.end() ||
14257 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14258 if (pendingRequest == mPendingRequestsList.end() ||
14259 pendingBuffer->frame_number < pendingRequest->frame_number) {
14260 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14261 // with error.
14262 for (auto &info : pendingBuffer->mPendingBufferList) {
14263 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014264 camera3_notify_msg_t notify_msg;
14265 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14266 notify_msg.type = CAMERA3_MSG_ERROR;
14267 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014268 notify_msg.message.error.error_stream = info.stream;
14269 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014270 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014271
14272 camera3_stream_buffer_t buffer = {};
14273 buffer.acquire_fence = -1;
14274 buffer.release_fence = -1;
14275 buffer.buffer = info.buffer;
14276 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14277 buffer.stream = info.stream;
14278 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014279 }
14280
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014281 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14282 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14283 pendingBuffer->frame_number > pendingRequest->frame_number) {
14284 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014285 camera3_notify_msg_t notify_msg;
14286 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14287 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014288 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14289 notify_msg.message.error.error_stream = nullptr;
14290 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014291 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014292
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014293 if (pendingRequest->input_buffer != nullptr) {
14294 camera3_capture_result result = {};
14295 result.frame_number = pendingRequest->frame_number;
14296 result.result = nullptr;
14297 result.input_buffer = pendingRequest->input_buffer;
14298 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014299 }
14300
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014301 mShutterDispatcher.clear(pendingRequest->frame_number);
14302 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14303 } else {
14304 // If both buffers and result metadata weren't sent yet, notify about a request error
14305 // and return buffers with error.
14306 for (auto &info : pendingBuffer->mPendingBufferList) {
14307 camera3_notify_msg_t notify_msg;
14308 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14309 notify_msg.type = CAMERA3_MSG_ERROR;
14310 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14311 notify_msg.message.error.error_stream = info.stream;
14312 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14313 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014314
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014315 camera3_stream_buffer_t buffer = {};
14316 buffer.acquire_fence = -1;
14317 buffer.release_fence = -1;
14318 buffer.buffer = info.buffer;
14319 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14320 buffer.stream = info.stream;
14321 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14322 }
14323
14324 if (pendingRequest->input_buffer != nullptr) {
14325 camera3_capture_result result = {};
14326 result.frame_number = pendingRequest->frame_number;
14327 result.result = nullptr;
14328 result.input_buffer = pendingRequest->input_buffer;
14329 orchestrateResult(&result);
14330 }
14331
14332 mShutterDispatcher.clear(pendingRequest->frame_number);
14333 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14334 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014335 }
14336 }
14337
14338    /* Reset the pending frame-drop list and pending request/buffer state */
14339 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014340 mShutterDispatcher.clear();
14341 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014342 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014343 mExpectedFrameDuration = 0;
14344 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014345 LOGH("Cleared all the pending buffers ");
14346
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014347 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014348}
14349
14350bool QCamera3HardwareInterface::isOnEncoder(
14351 const cam_dimension_t max_viewfinder_size,
14352 uint32_t width, uint32_t height)
14353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014354 return ((width > (uint32_t)max_viewfinder_size.width) ||
14355 (height > (uint32_t)max_viewfinder_size.height) ||
14356 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14357 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014358}
14359
14360/*===========================================================================
14361 * FUNCTION : setBundleInfo
14362 *
14363 * DESCRIPTION: Set bundle info for all streams that are bundled.
14364 *
14365 * PARAMETERS : None
14366 *
14367 * RETURN : NO_ERROR on success
14368 * Error codes on failure
14369 *==========================================================================*/
14370int32_t QCamera3HardwareInterface::setBundleInfo()
14371{
14372 int32_t rc = NO_ERROR;
14373
14374 if (mChannelHandle) {
14375 cam_bundle_config_t bundleInfo;
14376 memset(&bundleInfo, 0, sizeof(bundleInfo));
14377 rc = mCameraHandle->ops->get_bundle_info(
14378 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14379 if (rc != NO_ERROR) {
14380 LOGE("get_bundle_info failed");
14381 return rc;
14382 }
14383 if (mAnalysisChannel) {
14384 mAnalysisChannel->setBundleInfo(bundleInfo);
14385 }
14386 if (mSupportChannel) {
14387 mSupportChannel->setBundleInfo(bundleInfo);
14388 }
14389 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14390 it != mStreamInfo.end(); it++) {
14391 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14392 channel->setBundleInfo(bundleInfo);
14393 }
14394 if (mRawDumpChannel) {
14395 mRawDumpChannel->setBundleInfo(bundleInfo);
14396 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014397 if (mHdrPlusRawSrcChannel) {
14398 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14399 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014400 }
14401
14402 return rc;
14403}
14404
14405/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014406 * FUNCTION : setInstantAEC
14407 *
14408 * DESCRIPTION: Set Instant AEC related params.
14409 *
14410 * PARAMETERS :
14411 * @meta: CameraMetadata reference
14412 *
14413 * RETURN : NO_ERROR on success
14414 * Error codes on failure
14415 *==========================================================================*/
14416int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14417{
14418 int32_t rc = NO_ERROR;
14419 uint8_t val = 0;
14420 char prop[PROPERTY_VALUE_MAX];
14421
14422 // First try to configure instant AEC from framework metadata
14423 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14424 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14425 }
14426
14427 // If framework did not set this value, try to read from set prop.
14428    // If the framework did not set this value, try to read it from the system property.
14429 memset(prop, 0, sizeof(prop));
14430 property_get("persist.camera.instant.aec", prop, "0");
14431 val = (uint8_t)atoi(prop);
14432 }
14433
14434 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14435 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14436 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14437 mInstantAEC = val;
14438 mInstantAECSettledFrameNumber = 0;
14439 mInstantAecFrameIdxCount = 0;
14440 LOGH("instantAEC value set %d",val);
14441 if (mInstantAEC) {
14442 memset(prop, 0, sizeof(prop));
14443 property_get("persist.camera.ae.instant.bound", prop, "10");
14444 int32_t aec_frame_skip_cnt = atoi(prop);
14445 if (aec_frame_skip_cnt >= 0) {
14446 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14447 } else {
14448 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14449 rc = BAD_VALUE;
14450 }
14451 }
14452 } else {
14453 LOGE("Bad instant aec value set %d", val);
14454 rc = BAD_VALUE;
14455 }
14456 return rc;
14457}
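// Usage sketch (illustrative only, not called by the HAL): when the framework does not set
// QCAMERA3_INSTANT_AEC_MODE (or sets it to 0), instant AEC can be exercised through the
// properties read above, assuming a build where these persist properties are settable:
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10
// A value outside [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) is rejected with
// BAD_VALUE, as in the checks above.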
14458
14459/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014460 * FUNCTION : get_num_overall_buffers
14461 *
14462 * DESCRIPTION: Estimate number of pending buffers across all requests.
14463 * DESCRIPTION: Return the total number of pending buffers across all requests.
14464 * PARAMETERS : None
14465 *
14466 * RETURN : Number of overall pending buffers
14467 *
14468 *==========================================================================*/
14469uint32_t PendingBuffersMap::get_num_overall_buffers()
14470{
14471 uint32_t sum_buffers = 0;
14472 for (auto &req : mPendingBuffersInRequest) {
14473 sum_buffers += req.mPendingBufferList.size();
14474 }
14475 return sum_buffers;
14476}
14477
14478/*===========================================================================
14479 * FUNCTION : removeBuf
14480 *
14481 * DESCRIPTION: Remove a matching buffer from tracker.
14482 *
14483 * PARAMETERS : @buffer: buffer handle to remove from the tracker
14484 *
14485 * RETURN : None
14486 *
14487 *==========================================================================*/
14488void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14489{
14490 bool buffer_found = false;
14491 for (auto req = mPendingBuffersInRequest.begin();
14492 req != mPendingBuffersInRequest.end(); req++) {
14493 for (auto k = req->mPendingBufferList.begin();
14494 k != req->mPendingBufferList.end(); k++ ) {
14495 if (k->buffer == buffer) {
14496 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14497 req->frame_number, buffer);
14498 k = req->mPendingBufferList.erase(k);
14499 if (req->mPendingBufferList.empty()) {
14500 // Remove this request from Map
14501 req = mPendingBuffersInRequest.erase(req);
14502 }
14503 buffer_found = true;
14504 break;
14505 }
14506 }
14507 if (buffer_found) {
14508 break;
14509 }
14510 }
14511 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14512 get_num_overall_buffers());
14513}
14514
14515/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014516 * FUNCTION : getBufErrStatus
14517 *
14518 * DESCRIPTION: get buffer error status
14519 *
14520 * PARAMETERS : @buffer: buffer handle
14521 *
14522 * RETURN : Error status
14523 *
14524 *==========================================================================*/
14525int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14526{
14527 for (auto& req : mPendingBuffersInRequest) {
14528 for (auto& k : req.mPendingBufferList) {
14529 if (k.buffer == buffer)
14530 return k.bufStatus;
14531 }
14532 }
14533 return CAMERA3_BUFFER_STATUS_OK;
14534}
14535
14536/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014537 * FUNCTION : setPAAFSupport
14538 *
14539 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14540 * feature mask according to stream type and filter
14541 * arrangement
14542 *
14543 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14544 * @stream_type: stream type
14545 * @filter_arrangement: filter arrangement
14546 *
14547 * RETURN : None
14548 *==========================================================================*/
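// Example (illustrative): for a Bayer (e.g. RGGB) sensor, a PREVIEW, ANALYSIS or VIDEO
// stream gets CAM_QCOM_FEATURE_PAAF added to its feature mask unless
// CAM_QTI_FEATURE_PPEISCORE is already set; for a mono (Y-only) sensor, only the
// ANALYSIS stream gets the PAAF bit.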
14549void QCamera3HardwareInterface::setPAAFSupport(
14550 cam_feature_mask_t& feature_mask,
14551 cam_stream_type_t stream_type,
14552 cam_color_filter_arrangement_t filter_arrangement)
14553{
Thierry Strudel3d639192016-09-09 11:52:26 -070014554 switch (filter_arrangement) {
14555 case CAM_FILTER_ARRANGEMENT_RGGB:
14556 case CAM_FILTER_ARRANGEMENT_GRBG:
14557 case CAM_FILTER_ARRANGEMENT_GBRG:
14558 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014559 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14560 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014561 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014562 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14563 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014564 }
14565 break;
14566 case CAM_FILTER_ARRANGEMENT_Y:
14567 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14568 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14569 }
14570 break;
14571 default:
14572 break;
14573 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014574 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14575 feature_mask, stream_type, filter_arrangement);
14576
14577
Thierry Strudel3d639192016-09-09 11:52:26 -070014578}
14579
14580/*===========================================================================
14581* FUNCTION : getSensorMountAngle
14582*
14583* DESCRIPTION: Retrieve sensor mount angle
14584*
14585* PARAMETERS : None
14586*
14587* RETURN : sensor mount angle in uint32_t
14588*==========================================================================*/
14589uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14590{
14591 return gCamCapability[mCameraId]->sensor_mount_angle;
14592}
14593
14594/*===========================================================================
14595* FUNCTION : getRelatedCalibrationData
14596*
14597* DESCRIPTION: Retrieve related system calibration data
14598*
14599* PARAMETERS : None
14600*
14601* RETURN : Pointer of related system calibration data
14602*==========================================================================*/
14603const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14604{
14605 return (const cam_related_system_calibration_data_t *)
14606 &(gCamCapability[mCameraId]->related_cam_calibration);
14607}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014608
14609/*===========================================================================
14610 * FUNCTION : is60HzZone
14611 *
14612 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14613 * DESCRIPTION: Whether the device is in a region with 60Hz mains (power line) frequency
14614 * PARAMETERS : None
14615 *
14616 * RETURN : True if in 60Hz zone, False otherwise
14617 *==========================================================================*/
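// Example (illustrative): with this heuristic a UTC offset of -5h (<= -2h) or +9h (>= +8h)
// is treated as a 60Hz region, while +1h falls inside the (-2h, +8h) window and is treated
// as 50Hz. A failed localtime_r() also defaults to 60Hz.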
14618bool QCamera3HardwareInterface::is60HzZone()
14619{
14620 time_t t = time(NULL);
14621 struct tm lt;
14622
14623 struct tm* r = localtime_r(&t, &lt);
14624
14625 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14626 return true;
14627 else
14628 return false;
14629}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014630
14631/*===========================================================================
14632 * FUNCTION : adjustBlackLevelForCFA
14633 *
14634 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14635 * of the Bayer CFA (Color Filter Array).
14636 *
14637 * PARAMETERS : @input: black level pattern in the order of RGGB
14638 * @output: black level pattern in the order of CFA
14639 * @color_arrangement: CFA color arrangement
14640 *
14641 * RETURN : None
14642 *==========================================================================*/
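// Worked example (illustrative): for CAM_FILTER_ARRANGEMENT_GRBG, an RGGB-ordered input
// {R, Gr, Gb, B} is reordered to {Gr, R, B, Gb}, so that output[i] corresponds to CFA
// position i of the sensor.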
14643template<typename T>
14644void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14645 T input[BLACK_LEVEL_PATTERN_CNT],
14646 T output[BLACK_LEVEL_PATTERN_CNT],
14647 cam_color_filter_arrangement_t color_arrangement)
14648{
14649 switch (color_arrangement) {
14650 case CAM_FILTER_ARRANGEMENT_GRBG:
14651 output[0] = input[1];
14652 output[1] = input[0];
14653 output[2] = input[3];
14654 output[3] = input[2];
14655 break;
14656 case CAM_FILTER_ARRANGEMENT_GBRG:
14657 output[0] = input[2];
14658 output[1] = input[3];
14659 output[2] = input[0];
14660 output[3] = input[1];
14661 break;
14662 case CAM_FILTER_ARRANGEMENT_BGGR:
14663 output[0] = input[3];
14664 output[1] = input[2];
14665 output[2] = input[1];
14666 output[3] = input[0];
14667 break;
14668 case CAM_FILTER_ARRANGEMENT_RGGB:
14669 output[0] = input[0];
14670 output[1] = input[1];
14671 output[2] = input[2];
14672 output[3] = input[3];
14673 break;
14674 default:
14675 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14676 break;
14677 }
14678}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014679
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014680void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14681 CameraMetadata &resultMetadata,
14682 std::shared_ptr<metadata_buffer_t> settings)
14683{
14684 if (settings == nullptr) {
14685 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14686 return;
14687 }
14688
14689 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14690 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14691 }
14692
14693 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14694 String8 str((const char *)gps_methods);
14695 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14696 }
14697
14698 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14699 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14700 }
14701
14702 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14703 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14704 }
14705
14706 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14707 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14708 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14709 }
14710
14711 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14712 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14713 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14714 }
14715
14716 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14717 int32_t fwk_thumb_size[2];
14718 fwk_thumb_size[0] = thumb_size->width;
14719 fwk_thumb_size[1] = thumb_size->height;
14720 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14721 }
14722
14723 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14724 uint8_t fwk_intent = intent[0];
14725 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14726 }
14727}
14728
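// Summary of the checks below: a request is considered HDR+ compatible only when it looks
// like a plain ZSL still capture, i.e. HDR+ is not explicitly disabled, noise reduction,
// edge, aberration and tonemap modes are HIGH_QUALITY, AE is ON or ON_AUTO_FLASH, AWB is
// AUTO, effects are OFF, control mode is AUTO or USE_SCENE_MODE, ZSL is enabled, flash is
// OFF, the crop region equals the full active array, and the only output is a single
// JPEG (BLOB) buffer.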
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014729bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14730 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014731 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14732 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14733 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14734 return false;
14735 }
14736
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014737 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14738 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14739 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014740 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014741 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014742 return false;
14743 }
14744
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014745 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014746 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14747 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014748 return false;
14749 }
14750
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014751 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14752 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14753 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14754 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14755 return false;
14756 }
14757
14758 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14759 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14760 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14761 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14762 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14763 return false;
14764 }
14765
14766 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14767 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14768 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14769 return false;
14770 }
14771
14772 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14773 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14774 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14775        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14776 return false;
14777 }
14778
14779 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14780 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14781 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14782 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14783 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14784 return false;
14785 }
14786
14787 // TODO (b/32585046): support non-ZSL.
14788 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14789 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14790 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14791 return false;
14792 }
14793
14794 // TODO (b/32586081): support flash.
14795 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14796 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14797 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14798 return false;
14799 }
14800
14801 // TODO (b/36492953): support digital zoom.
14802 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14803 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14804 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14805 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14806 gCamCapability[mCameraId]->active_array_size.width ||
14807 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14808 gCamCapability[mCameraId]->active_array_size.height) {
14809 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14810 __FUNCTION__);
14811 return false;
14812 }
14813
14814 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14815 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14816 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14817 return false;
14818 }
14819
14820 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014821 if (request.num_output_buffers != 1 ||
14822 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014823 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014824 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014825 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014826                    request.output_buffers[i].stream->width,
14827                    request.output_buffers[i].stream->height,
14828                    request.output_buffers[i].stream->format);
14829 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014830 return false;
14831 }
14832
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014833 return true;
14834}
14835
14836bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14837 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14838 const CameraMetadata &metadata)
14839{
14840 if (hdrPlusRequest == nullptr) return false;
14841 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14842
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014843 // Get a YUV buffer from pic channel.
14844 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14845 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14846 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14847 if (res != OK) {
14848 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14849 __FUNCTION__, strerror(-res), res);
14850 return false;
14851 }
14852
14853 pbcamera::StreamBuffer buffer;
14854 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014855 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014856 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014857 buffer.dataSize = yuvBuffer->frame_len;
14858
14859 pbcamera::CaptureRequest pbRequest;
14860 pbRequest.id = request.frame_number;
14861 pbRequest.outputBuffers.push_back(buffer);
14862
14863 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014864 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014865 if (res != OK) {
14866 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14867 strerror(-res), res);
14868 return false;
14869 }
14870
14871 hdrPlusRequest->yuvBuffer = yuvBuffer;
14872 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14873
14874 return true;
14875}
14876
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014877status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14878{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014879 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14880 return OK;
14881 }
14882
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014883 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014884 if (res != OK) {
14885 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14886 strerror(-res), res);
14887 return res;
14888 }
14889 gHdrPlusClientOpening = true;
14890
14891 return OK;
14892}
14893
Chien-Yu Chenee335912017-02-09 17:53:20 -080014894status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14895{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014896 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014897
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014898 if (mHdrPlusModeEnabled) {
14899 return OK;
14900 }
14901
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014902 // Check if gHdrPlusClient is opened or being opened.
14903 if (gHdrPlusClient == nullptr) {
14904 if (gHdrPlusClientOpening) {
14905 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14906 return OK;
14907 }
14908
14909 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014910 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014911 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14912 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014913 return res;
14914 }
14915
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014916 // When opening HDR+ client completes, HDR+ mode will be enabled.
14917 return OK;
14918
Chien-Yu Chenee335912017-02-09 17:53:20 -080014919 }
14920
14921 // Configure stream for HDR+.
14922 res = configureHdrPlusStreamsLocked();
14923 if (res != OK) {
14924 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014925 return res;
14926 }
14927
14928 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14929 res = gHdrPlusClient->setZslHdrPlusMode(true);
14930 if (res != OK) {
14931 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014932 return res;
14933 }
14934
14935 mHdrPlusModeEnabled = true;
14936 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14937
14938 return OK;
14939}
14940
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014941void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14942{
14943 if (gHdrPlusClientOpening) {
14944 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14945 }
14946}
14947
Chien-Yu Chenee335912017-02-09 17:53:20 -080014948void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14949{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014950 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014951 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014952 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14953 if (res != OK) {
14954 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14955 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014956
14957 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014958 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014959 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014960 }
14961
14962 mHdrPlusModeEnabled = false;
14963 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14964}
14965
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014966bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14967{
14968 // Check if mPictureChannel is valid.
14969 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14970 if (mPictureChannel == nullptr) {
14971 return false;
14972 }
14973
14974 return true;
14975}
14976
Chien-Yu Chenee335912017-02-09 17:53:20 -080014977status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014978{
14979 pbcamera::InputConfiguration inputConfig;
14980 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14981 status_t res = OK;
14982
14983 // Configure HDR+ client streams.
14984 // Get input config.
14985 if (mHdrPlusRawSrcChannel) {
14986 // HDR+ input buffers will be provided by HAL.
14987 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14988 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14989 if (res != OK) {
14990            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14991 __FUNCTION__, strerror(-res), res);
14992 return res;
14993 }
14994
14995 inputConfig.isSensorInput = false;
14996 } else {
14997 // Sensor MIPI will send data to Easel.
14998 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014999 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015000 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15001 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15002 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15003 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15004 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070015005 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015006 if (mSensorModeInfo.num_raw_bits != 10) {
15007 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15008 mSensorModeInfo.num_raw_bits);
15009 return BAD_VALUE;
15010 }
15011
15012 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015013 }
15014
15015 // Get output configurations.
15016 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080015017 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015018
15019 // Easel may need to output YUV output buffers if mPictureChannel was created.
15020 pbcamera::StreamConfiguration yuvOutputConfig;
15021 if (mPictureChannel != nullptr) {
15022 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
15023 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
15024 if (res != OK) {
15025            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15026 __FUNCTION__, strerror(-res), res);
15027
15028 return res;
15029 }
15030
15031 outputStreamConfigs.push_back(yuvOutputConfig);
15032 }
15033
15034 // TODO: consider other channels for YUV output buffers.
15035
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015036 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015037 if (res != OK) {
15038        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15039 strerror(-res), res);
15040 return res;
15041 }
15042
15043 return OK;
15044}
15045
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015046void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15047{
15048 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15049 // Set HAL state to error.
15050 pthread_mutex_lock(&mMutex);
15051 mState = ERROR;
15052 pthread_mutex_unlock(&mMutex);
15053
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015054 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015055}
15056
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015057void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15058{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015059 if (client == nullptr) {
15060 ALOGE("%s: Opened client is null.", __FUNCTION__);
15061 return;
15062 }
15063
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015064 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015065 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15066
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015067 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015068 if (!gHdrPlusClientOpening) {
15069        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
15070 return;
15071 }
15072
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015073 gHdrPlusClient = std::move(client);
15074 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015075 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015076
15077 // Set static metadata.
15078 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15079 if (res != OK) {
15080 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15081 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015082 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015083 gHdrPlusClient = nullptr;
15084 return;
15085 }
15086
15087 // Enable HDR+ mode.
15088 res = enableHdrPlusModeLocked();
15089 if (res != OK) {
15090        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
15091 }
15092}
15093
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015094void QCamera3HardwareInterface::onOpenFailed(status_t err)
15095{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015096 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015097 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015098 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015099 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015100}
15101
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015102void QCamera3HardwareInterface::onFatalError()
15103{
15104 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15105
15106 // Set HAL state to error.
15107 pthread_mutex_lock(&mMutex);
15108 mState = ERROR;
15109 pthread_mutex_unlock(&mMutex);
15110
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015111 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015112}
15113
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015114void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15115{
15116 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15117 __LINE__, requestId, apSensorTimestampNs);
15118
15119 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15120}
15121
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015122void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15123{
15124 pthread_mutex_lock(&mMutex);
15125
15126 // Find the pending request for this result metadata.
15127    // Find the pending request for this frame number.
15128 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15129 requestIter++;
15130 }
15131
15132 if (requestIter == mPendingRequestsList.end()) {
15133 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15134 pthread_mutex_unlock(&mMutex);
15135 return;
15136 }
15137
15138 requestIter->partial_result_cnt++;
15139
15140 CameraMetadata metadata;
15141 uint8_t ready = true;
15142 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15143
15144 // Send it to framework.
15145 camera3_capture_result_t result = {};
15146
15147 result.result = metadata.getAndLock();
15148 // Populate metadata result
15149 result.frame_number = requestId;
15150 result.num_output_buffers = 0;
15151 result.output_buffers = NULL;
15152 result.partial_result = requestIter->partial_result_cnt;
15153
15154 orchestrateResult(&result);
15155 metadata.unlock(result.result);
15156
15157 pthread_mutex_unlock(&mMutex);
15158}
15159
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015160void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15161 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15162 uint32_t stride, int32_t format)
15163{
15164 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15165 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15166 __LINE__, width, height, requestId);
15167 char buf[FILENAME_MAX] = {};
15168 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15169 requestId, width, height);
15170
15171 pbcamera::StreamConfiguration config = {};
15172 config.image.width = width;
15173 config.image.height = height;
15174 config.image.format = format;
15175
15176 pbcamera::PlaneConfiguration plane = {};
15177 plane.stride = stride;
15178 plane.scanline = height;
15179
15180 config.image.planes.push_back(plane);
15181
15182 pbcamera::StreamBuffer buffer = {};
15183 buffer.streamId = 0;
15184 buffer.dmaBufFd = -1;
15185 buffer.data = postview->data();
15186 buffer.dataSize = postview->size();
15187
15188 hdrplus_client_utils::writePpm(buf, config, buffer);
15189 }
15190
15191 pthread_mutex_lock(&mMutex);
15192
15193    // Find the pending request for this postview.
15194 auto requestIter = mPendingRequestsList.begin();
15195 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15196 requestIter++;
15197 }
15198
15199 if (requestIter == mPendingRequestsList.end()) {
15200 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15201 pthread_mutex_unlock(&mMutex);
15202 return;
15203 }
15204
15205 requestIter->partial_result_cnt++;
15206
15207 CameraMetadata metadata;
15208 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15209 static_cast<int32_t>(stride)};
15210 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15211 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15212
15213 // Send it to framework.
15214 camera3_capture_result_t result = {};
15215
15216 result.result = metadata.getAndLock();
15217 // Populate metadata result
15218 result.frame_number = requestId;
15219 result.num_output_buffers = 0;
15220 result.output_buffers = NULL;
15221 result.partial_result = requestIter->partial_result_cnt;
15222
15223 orchestrateResult(&result);
15224 metadata.unlock(result.result);
15225
15226 pthread_mutex_unlock(&mMutex);
15227}
15228
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015229void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015230 const camera_metadata_t &resultMetadata)
15231{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015232 if (result != nullptr) {
15233 if (result->outputBuffers.size() != 1) {
15234 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15235 result->outputBuffers.size());
15236 return;
15237 }
15238
15239 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15240 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15241 result->outputBuffers[0].streamId);
15242 return;
15243 }
15244
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015245 // TODO (b/34854987): initiate this from HDR+ service.
15246 onNextCaptureReady(result->requestId);
15247
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015248 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015249 HdrPlusPendingRequest pendingRequest;
15250 {
15251 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15252 auto req = mHdrPlusPendingRequests.find(result->requestId);
15253 pendingRequest = req->second;
15254 }
15255
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015256 // Update the result metadata with the settings of the HDR+ still capture request because
15257 // the result metadata belongs to a ZSL buffer.
15258 CameraMetadata metadata;
15259 metadata = &resultMetadata;
15260 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15261 camera_metadata_t* updatedResultMetadata = metadata.release();
15262
15263 QCamera3PicChannel *picChannel =
15264 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15265
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015266 // Check if dumping HDR+ YUV output is enabled.
15267 char prop[PROPERTY_VALUE_MAX];
15268 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15269 bool dumpYuvOutput = atoi(prop);
15270
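        // Debugging sketch (illustrative): on builds where this persist property is
        // settable, e.g. `adb shell setprop persist.camera.hdrplus.dump_yuv 1`, the HDR+
        // YUV output handled below is written as a .ppm file under
        // QCAMERA_DUMP_FRM_LOCATION.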
15271 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015272 // Dump yuv buffer to a ppm file.
15273 pbcamera::StreamConfiguration outputConfig;
15274 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15275 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15276 if (rc == OK) {
15277 char buf[FILENAME_MAX] = {};
15278 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15279 result->requestId, result->outputBuffers[0].streamId,
15280 outputConfig.image.width, outputConfig.image.height);
15281
15282 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15283 } else {
15284 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15285 __FUNCTION__, strerror(-rc), rc);
15286 }
15287 }
15288
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015289 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15290 auto halMetadata = std::make_shared<metadata_buffer_t>();
15291 clear_metadata_buffer(halMetadata.get());
15292
15293 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15294 // encoding.
15295 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15296 halStreamId, /*minFrameDuration*/0);
15297 if (res == OK) {
15298 // Return the buffer to pic channel for encoding.
15299 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15300 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15301 halMetadata);
15302 } else {
15303 // Return the buffer without encoding.
15304 // TODO: This should not happen but we may want to report an error buffer to camera
15305 // service.
15306 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15307 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15308 strerror(-res), res);
15309 }
15310
15311 // Send HDR+ metadata to framework.
15312 {
15313 pthread_mutex_lock(&mMutex);
15314
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015315 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15316 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015317 pthread_mutex_unlock(&mMutex);
15318 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015319
15320 // Remove the HDR+ pending request.
15321 {
15322 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15323 auto req = mHdrPlusPendingRequests.find(result->requestId);
15324 mHdrPlusPendingRequests.erase(req);
15325 }
15326 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015327}
15328
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015329void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15330{
15331 if (failedResult == nullptr) {
15332 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15333 return;
15334 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015335
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015336 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015337
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015338 // Remove the pending HDR+ request.
15339 {
15340 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15341 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15342
15343 // Return the buffer to pic channel.
15344 QCamera3PicChannel *picChannel =
15345 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15346 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15347
15348 mHdrPlusPendingRequests.erase(pendingRequest);
15349 }
15350
15351 pthread_mutex_lock(&mMutex);
15352
15353 // Find the pending buffers.
15354 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15355 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15356 if (pendingBuffers->frame_number == failedResult->requestId) {
15357 break;
15358 }
15359 pendingBuffers++;
15360 }
15361
15362 // Send out buffer errors for the pending buffers.
15363 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15364 std::vector<camera3_stream_buffer_t> streamBuffers;
15365 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15366 // Prepare a stream buffer.
15367 camera3_stream_buffer_t streamBuffer = {};
15368 streamBuffer.stream = buffer.stream;
15369 streamBuffer.buffer = buffer.buffer;
15370 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15371 streamBuffer.acquire_fence = -1;
15372 streamBuffer.release_fence = -1;
15373
15374 streamBuffers.push_back(streamBuffer);
15375
15376 // Send out error buffer event.
15377 camera3_notify_msg_t notify_msg = {};
15378 notify_msg.type = CAMERA3_MSG_ERROR;
15379 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15380 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15381 notify_msg.message.error.error_stream = buffer.stream;
15382
15383 orchestrateNotify(&notify_msg);
15384 }
15385
15386 camera3_capture_result_t result = {};
15387 result.frame_number = pendingBuffers->frame_number;
15388 result.num_output_buffers = streamBuffers.size();
15389 result.output_buffers = &streamBuffers[0];
15390
15391 // Send out result with buffer errors.
15392 orchestrateResult(&result);
15393
15394 // Remove pending buffers.
15395 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15396 }
15397
15398 // Remove pending request.
15399 auto halRequest = mPendingRequestsList.begin();
15400 while (halRequest != mPendingRequestsList.end()) {
15401 if (halRequest->frame_number == failedResult->requestId) {
15402 mPendingRequestsList.erase(halRequest);
15403 break;
15404 }
15405 halRequest++;
15406 }
15407
15408 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015409}
15410
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015411
15412ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15413 mParent(parent) {}
15414
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015415void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015416{
15417 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015418
15419 if (isReprocess) {
15420 mReprocessShutters.emplace(frameNumber, Shutter());
15421 } else {
15422 mShutters.emplace(frameNumber, Shutter());
15423 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015424}
15425
15426void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15427{
15428 std::lock_guard<std::mutex> lock(mLock);
15429
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015430 std::map<uint32_t, Shutter> *shutters = nullptr;
15431
15432 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015433 auto shutter = mShutters.find(frameNumber);
15434 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015435 shutter = mReprocessShutters.find(frameNumber);
15436 if (shutter == mReprocessShutters.end()) {
15437 // Shutter was already sent.
15438 return;
15439 }
15440 shutters = &mReprocessShutters;
15441 } else {
15442 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015443 }
15444
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015445 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015446 shutter->second.ready = true;
15447 shutter->second.timestamp = timestamp;
15448
15449    // Iterate through the shutters and send them out until reaching one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015450 shutter = shutters->begin();
15451 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015452 if (!shutter->second.ready) {
15453 // If this shutter is not ready, the following shutters can't be sent.
15454 break;
15455 }
15456
15457 camera3_notify_msg_t msg = {};
15458 msg.type = CAMERA3_MSG_SHUTTER;
15459 msg.message.shutter.frame_number = shutter->first;
15460 msg.message.shutter.timestamp = shutter->second.timestamp;
15461 mParent->orchestrateNotify(&msg);
15462
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015463 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015464 }
15465}
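// Example (illustrative): if shutters are expected for frames 10 and 11 and frame 11's
// timestamp arrives first, nothing is sent; once frame 10's timestamp arrives, shutter
// notifications for 10 and then 11 go out in order. Regular and reprocess shutters are
// tracked in separate queues.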
15466
15467void ShutterDispatcher::clear(uint32_t frameNumber)
15468{
15469 std::lock_guard<std::mutex> lock(mLock);
15470 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015471 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015472}
15473
15474void ShutterDispatcher::clear()
15475{
15476 std::lock_guard<std::mutex> lock(mLock);
15477
15478 // Log errors for stale shutters.
15479 for (auto &shutter : mShutters) {
15480 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15481 __FUNCTION__, shutter.first, shutter.second.ready,
15482 shutter.second.timestamp);
15483 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015484
15485 // Log errors for stale reprocess shutters.
15486 for (auto &shutter : mReprocessShutters) {
15487 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15488 __FUNCTION__, shutter.first, shutter.second.ready,
15489 shutter.second.timestamp);
15490 }
15491
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015492 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015493 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015494}
15495
15496OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15497 mParent(parent) {}
15498
15499status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15500{
15501 std::lock_guard<std::mutex> lock(mLock);
15502 mStreamBuffers.clear();
15503 if (!streamList) {
15504 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15505 return -EINVAL;
15506 }
15507
15508 // Create a "frame-number -> buffer" map for each stream.
15509 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15510 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15511 }
15512
15513 return OK;
15514}
15515
15516status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15517{
15518 std::lock_guard<std::mutex> lock(mLock);
15519
15520 // Find the "frame-number -> buffer" map for the stream.
15521 auto buffers = mStreamBuffers.find(stream);
15522 if (buffers == mStreamBuffers.end()) {
15523 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15524 return -EINVAL;
15525 }
15526
15527 // Create an unready buffer for this frame number.
15528 buffers->second.emplace(frameNumber, Buffer());
15529 return OK;
15530}
15531
15532void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15533 const camera3_stream_buffer_t &buffer)
15534{
15535 std::lock_guard<std::mutex> lock(mLock);
15536
15537 // Find the frame number -> buffer map for the stream.
15538 auto buffers = mStreamBuffers.find(buffer.stream);
15539 if (buffers == mStreamBuffers.end()) {
15540 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15541 return;
15542 }
15543
15544    // Find the unready buffer for this frame number and mark it ready.
15545 auto pendingBuffer = buffers->second.find(frameNumber);
15546 if (pendingBuffer == buffers->second.end()) {
15547 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15548 return;
15549 }
15550
15551 pendingBuffer->second.ready = true;
15552 pendingBuffer->second.buffer = buffer;
15553
15554 // Iterate through the buffers and send out buffers until the one that's not ready yet.
15555    // Iterate through the buffers and send them out until reaching one that's not ready yet.
15556 while (pendingBuffer != buffers->second.end()) {
15557 if (!pendingBuffer->second.ready) {
15558 // If this buffer is not ready, the following buffers can't be sent.
15559 break;
15560 }
15561
15562 camera3_capture_result_t result = {};
15563 result.frame_number = pendingBuffer->first;
15564 result.num_output_buffers = 1;
15565 result.output_buffers = &pendingBuffer->second.buffer;
15566
15567        // Send out the result with the ready buffer.
15568 mParent->orchestrateResult(&result);
15569
15570 pendingBuffer = buffers->second.erase(pendingBuffer);
15571 }
15572}
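// Example (illustrative): if buffers are expected on a stream for frames 20, 21 and 22
// and frame 21's buffer becomes ready first, it is held back; once frame 20's buffer is
// marked ready, results for 20 and then 21 are sent in order, while 22 keeps waiting.
// Ordering is enforced per configured stream.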
15573
15574void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15575{
15576 std::lock_guard<std::mutex> lock(mLock);
15577
15578 // Log errors for stale buffers.
15579 for (auto &buffers : mStreamBuffers) {
15580 for (auto &buffer : buffers.second) {
15581 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15582 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15583 }
15584 buffers.second.clear();
15585 }
15586
15587 if (clearConfiguredStreams) {
15588 mStreamBuffers.clear();
15589 }
15590}
15591
Thierry Strudel3d639192016-09-09 11:52:26 -070015592}; //end namespace qcamera