/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
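// Usage sketch (illustrative; it mirrors how this macro is applied to the
// dual-cam command buffer later in this file): the macro simply forwards to
// the heap object's getPtr(), e.g.
//   m_pDualCamCmdPtr =
//       (cam_dual_camera_cmd_info_t *) DATA_PTR(m_pDualCamCmdHeap, 0);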

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
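// For reference (a general camera3 note, not specific to this file): this
// value is expected to be reported as android.request.partialResultCount, so
// each capture's metadata may reach the framework in up to this many
// camera3_capture_result pieces, the final one carrying the complete result.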
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
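// Illustrative arithmetic only (a sketch, not a formula taken from this file):
// with these defaults a 240 fps HFR request alongside a 30 fps preview implies
// 240 / PREVIEW_FPS_FOR_HFR = 8 video frames per preview frame, which matches
// the ceiling set by MAX_HFR_BATCH_SIZE.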
#define REGIONS_TUPLE_COUNT    5
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
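// Example (illustrative): METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the
// number of entries in that lookup table, i.e. sizeof(array) / sizeof(element).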

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6
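// Layout implied by the indices above (illustrative): each detected face packs
// its landmark coordinates into a flat int32 array as
//   [leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY]
// i.e. TOTAL_LANDMARK_INDICES values per face.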

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                 CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,         CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,          CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                 CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,             CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values the first match wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
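// A minimal sketch of the first-match reverse lookup described in the comment
// above the map (illustrative only; the loop and the names mapLen/halValue are
// assumptions for the example, not this HAL's actual lookup helper):
//
//   for (size_t i = 0; i < mapLen; i++) {
//       if (REFERENCE_ILLUMINANT_MAP[i].hal_name == halValue) {
//           return REFERENCE_ILLUMINANT_MAP[i].fwk_name; // first match wins
//       }
//   }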

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    {  60, CAM_HFR_MODE_60FPS},
    {  90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
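// For context (a sketch of the standard camera3 calling sequence, not anything
// specific to this file): once openCamera() hands back &mCameraDevice.common,
// the framework drives the session through this ops table, e.g.
//   device->ops->initialize(device, callback_ops);
//   device->ops->configure_streams(device, stream_list);
//   device->ops->process_capture_request(device, request);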

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
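// Example call (taken from openCamera() later in this file):
//   logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// With profiling enabled this logs the event tagged with the boot-time
// timestamp in milliseconds; otherwise it is a no-op.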

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD: verify whether this hardcoding is still needed, e.g. by checking whether mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested configurations are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration.
     * Check if unsupported sizes have been requested on any of them.
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes differ for each type of stream format; check against the
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
 1309             /* We could potentially break here to enforce that the ZSL
 1310              * stream set by the framework is always full active array size,
 1311              * but it is not clear from the spec whether the framework will
 1312              * always follow that. We also have logic to override to full
 1313              * array size, so keep the logic lenient for now.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
 1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
 1432         // Because EIS is "hard-coded" for certain use cases, and the current
 1433         // implementation doesn't support shared preview and video on the same
 1434         // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
 1446 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
 1452 * RETURN : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
 1461     /* Check for conditions where the PProc pipeline does not have any streams */
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473 /* Dummy stream needed if only raw or jpeg streams present */
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
 1498 * non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
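    // First compute the maximum dimension across all configured streams and push it
    // to the backend as CAM_INTF_PARM_MAX_DIMENSION so that it can select a sensor mode.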
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
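    // Then query which sensor mode the backend selected for that maximum dimension.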
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
 1558 * non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
 1571         LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and usecase
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : NULL
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
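    // The feature mask property may be given either as hex (with a "0x" prefix) or as decimal.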
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
 1774 * non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812     // Disable HDR+ if it's enabled.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819     /* first invalidate all the streams in mStreamInfo
1820 * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
1849 /* If content of mStreamInfo is not 0, there is metadata stream */
 1850         /* If mStreamInfo is not empty, there is a metadata stream */
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
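    // Determine whether the sensor advertises EIS support (either EIS 2.0 or EIS 3.0).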
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
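        // Categorize each output stream (stall / raw / processed) and count how many
        // land on the ISP encoder path; this drives the common feature mask selection below.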
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 2021                     newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
 2055                     // to SUPERSET so that it supports concurrent requests on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
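    // EIS is only used for back-camera video use cases; disable it otherwise.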
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
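    // debug.camera.tnr.forceenable allows TNR to be forced on via setprop, regardless of
    // the stream configuration checks below.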
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
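    // persist.camera.hdr.video enables video HDR for video sessions, except in
    // constrained high-speed (HFR) mode.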
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
 2120         LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
 2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with the following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
 2218         //if the stream is already in mStreamInfo, validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen3d836272017-09-20 11:10:21 -07002246 stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
 2267                     /* This scenario indicates multiple YUV streams with the same size
 2268                      * as the input stream have been requested. Since the zsl stream handle
 2269                      * is solely used for overriding the size of streams which share h/w
 2270                      * streams, we just make a guess here as to which stream is the ZSL
 2271                      * stream. This will be refactored once we have generic logic for
 2272                      * streams sharing encoder output.
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
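    // persist.camera.gzoom.at is a bitmask: bit 0 enables Google zoom on the video
    // stream and bit 1 on the preview stream; both apply to the back camera only.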
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364 // There could be more than one preview streams, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
 2432             //for non-zsl streams, find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589            newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //R/W needed for in-place algorithms
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596 /* For video encoding stream, set read/write rarely
2597             * flags so that the buffers may be allocated un-cached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605                LOGD("ZSL usage flag set, skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
2645                        //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651 /* Copy stream contents in HFR preview only case to create
2652 * dummy batch channel so that sensor streaming is in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692                     * disable UBWC for the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 * since there is no real stream associated with it
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 /* In HFR mode, if video stream is not added, create a dummy channel so that
3010 * ISP can create a batch mode even for preview only case. This channel is
3011 * never 'start'ed (no stream-on), it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028 LOGE("creation of mDummyBatchChannel failed."
3029                    " Preview will use non-HFR sensor mode ");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067    //Get min frame durations for this stream configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
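/*
 * Illustrative summary (not part of the HAL code, inferred from the logic
 * above): besides the framework-supplied streams, mStreamConfigInfo may also
 * carry internal entries, each appended together with
 * mStreamConfigInfo.num_streams++:
 *
 *   CAM_STREAM_TYPE_ANALYSIS  - when mAnalysisChannel could be created
 *   CAM_STREAM_TYPE_CALLBACK  - when mSupportChannel (dummy) is needed
 *   CAM_STREAM_TYPE_RAW       - for mRawDumpChannel / mHdrPlusRawSrcChannel
 *   CAM_STREAM_TYPE_VIDEO     - for mDummyBatchChannel in HFR preview-only mode
 *
 * so the stream count handed to the backend is typically larger than
 * streamList->num_streams.
 */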
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN :
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("Request %d: No output buffers provided!",
3116                frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
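/*
 * Hedged usage sketch (illustrative only; the actual call site may differ):
 * the capture request path is expected to bail out before queuing any work
 * when validation fails, e.g.
 *
 *   List<InternalRequest> internalStreams;  // typically empty for app requests
 *   int rc = validateCaptureRequest(request, internalStreams);
 *   if (rc != NO_ERROR) {
 *       return rc;  // BAD_VALUE propagates back to the framework
 *   }
 */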
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
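/*
 * Worked example (hypothetical sizes, for illustration only):
 *   configured streams: 1920x1080 preview + 4032x3024 JPEG, no RAW stream
 *   -> maxJpegDim = 4032*3024 > maxProcessedDim, so maxProcessedDim becomes
 *      4032*3024
 *   -> hasRaw is false, so maxRawDim stays 0 and mMinRawFrameDuration stays 0
 *   -> mMinProcessedFrameDuration and mMinJpegFrameDuration are taken from
 *      picture_min_duration[] at the index whose picture size matches 4032x3024
 */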
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
3272 * and current request configuration.
3273 *
3274 * PARAMETERS : @request: request sent by the framework
3275 *
3276 * RETURN     : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
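/*
 * Example (illustrative numbers): with mMinProcessedFrameDuration = 33.3ms,
 * mMinJpegFrameDuration = 50ms and mMinRawFrameDuration = 0 (no RAW stream
 * configured), a preview-only request reports 33.3ms, while a request that
 * also contains a BLOB buffer reports MAX(33.3ms, 50ms) = 50ms.
 */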
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
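/*
 * Note (inferred from the signalling above): flush() is assumed to wait on
 * mBuffersCond until numPendingBufsAtFlush drops to zero, so this signal is
 * what allows the flush sequence to continue once the back-end has returned
 * every outstanding buffer.
 */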
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360    /* In batch mode, the metadata will contain the frame number and timestamp of
3361 * the last frame in the batch. Eg: a batch containing buffers from request
3362 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3363     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
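    /*
     * Worked example (illustrative, matching the comment above): for a batch
     * carrying requests 5..8, the metadata reports last_frame_number = 8 and
     * the capture time of frame 8. With first_frame_number = 5,
     * frameNumDiff = 8 + 1 - 5 = 4, so the loop below runs four times and
     * infers frame numbers 5, 6, 7, 8 with timestamps spaced
     * NSEC_PER_SEC / mHFRVideoFps apart, ending at the reported capture time.
     */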
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401    /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
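/*
 * Illustrative usage (hypothetical call site): when an entire request has to
 * be failed rather than a single buffer, a caller could report it as
 *
 *   notifyError(frameNumber, CAMERA3_MSG_ERROR_REQUEST);
 *
 * which delivers a CAMERA3_MSG_ERROR notify with a NULL error_stream to the
 * framework via orchestrateNotify().
 */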
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 *                  which the partial result is being sent
3542 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
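/*
 * Note: when notifying the HDR+ client above, result.partial_result is
 * compared against PARTIAL_RESULT_COUNT, so only the last partial for a frame
 * is flagged as final; as the WithLock suffix implies, callers are expected to
 * hold mMutex while invoking this helper.
 */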
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
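    // The sensor timestamp is assumed to be on the BOOTTIME clock when the
    // capability is not flagged as timestamp_calibrated. The loop below
    // estimates the BOOTTIME-to-MONOTONIC offset by bracketing one BOOTTIME
    // read between two MONOTONIC reads, keeping the sample with the smallest
    // bracket, and then shifts capture_time onto the MONOTONIC time base.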
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709    //For the very first metadata callback, regardless of whether it contains a valid
3710 //frame number, send the partial metadata for the jumpstarting requests.
3711 //Note that this has to be done even if the metadata doesn't contain valid
3712 //urgent frame number, because in the case only 1 request is ever submitted
3713 //to HAL, there won't be subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729        //Received an urgent Frame Number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 (i->partial_result_cnt == 0)) {
3738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003740 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003741 }
3742
3743 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003744 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003745 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3746 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3748 // Instant AEC settled for this frame.
3749 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3750 mInstantAECSettledFrameNumber = urgent_frame_number;
3751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003752 break;
3753 }
3754 }
3755 }
3756
3757 if (!frame_number_valid) {
3758 LOGD("Not a valid normal frame number, used as SOF only");
3759 if (free_and_bufdone_meta_buf) {
3760 mMetadataChannel->bufDone(metadata_buf);
3761 free(metadata_buf);
3762 }
3763 goto done_metadata;
3764 }
3765 LOGH("valid frame_number = %u, capture_time = %lld",
3766 frame_number, capture_time);
3767
Emilian Peev4e0fe952017-06-30 12:40:09 -07003768 handleDepthDataLocked(metadata->depth_data, frame_number,
3769 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 // Check whether any stream buffer corresponding to this is dropped or not
3772 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3773    // OR check if instant AEC is enabled, then frames need to be dropped until AEC is settled.
3774 for (auto & pendingRequest : mPendingRequestsList) {
3775 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3776 mInstantAECSettledFrameNumber)) {
3777 camera3_notify_msg_t notify_msg = {};
3778 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 QCamera3ProcessingChannel *channel =
3781 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (p_cam_frame_drop) {
3784 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003785 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 // Got the stream ID for drop frame.
3787 dropFrame = true;
3788 break;
3789 }
3790 }
3791 } else {
3792 // This is instant AEC case.
3793                    // For instant AEC, drop the stream until AEC is settled.
3794 dropFrame = true;
3795 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 if (dropFrame) {
3798 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3799 if (p_cam_frame_drop) {
3800 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003801 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003802 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 } else {
3804 // For instant AEC, inform frame drop and frame number
3805 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3806 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 pendingRequest.frame_number, streamID,
3808 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 }
3810 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003811 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003812 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003814 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003815 if (p_cam_frame_drop) {
3816 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003817 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003818 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003819 } else {
3820 // For instant AEC, inform frame drop and frame number
3821 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3822 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003823 pendingRequest.frame_number, streamID,
3824 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003825 }
3826 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003828 PendingFrameDrop.stream_ID = streamID;
3829 // Add the Frame drop info to mPendingFrameDropList
3830 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832 }
3833 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835
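    // The loop below locates the pending request for this frame, stamps it with the
    // sensor timestamp, hands the metadata to any channel that needs it for internal
    // offline post-processing, and translates the HAL metadata into the framework result.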
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (auto & pendingRequest : mPendingRequestsList) {
3837 // Find the pending request with the frame number.
3838 if (pendingRequest.frame_number == frame_number) {
3839 // Update the sensor timestamp.
3840 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003841
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003843 /* Set the timestamp in display metadata so that clients aware of
3844 private_handle, such as VT, can use these unmodified timestamps.
3845 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003846 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003847
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 // Find channel requiring metadata, meaning internal offline postprocess
3849 // is needed.
3850 // TODO: for now, we don't support two streams requiring metadata at the same time
3851 // (because we are not making copies, and the metadata buffer is not reference counted).
3852 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003853 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3854 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 if (iter->need_metadata) {
3856 internalPproc = true;
3857 QCamera3ProcessingChannel *channel =
3858 (QCamera3ProcessingChannel *)iter->stream->priv;
3859 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 if(p_is_metabuf_queued != NULL) {
3861 *p_is_metabuf_queued = true;
3862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 break;
3864 }
3865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 for (auto itr = pendingRequest.internalRequestList.begin();
3867 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003868 if (itr->need_metadata) {
3869 internalPproc = true;
3870 QCamera3ProcessingChannel *channel =
3871 (QCamera3ProcessingChannel *)itr->stream->priv;
3872 channel->queueReprocMetadata(metadata_buf);
3873 break;
3874 }
3875 }
3876
Thierry Strudel54dc9782017-02-15 12:12:10 -08003877 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003878
3879 bool *enableZsl = nullptr;
3880 if (gExposeEnableZslKey) {
3881 enableZsl = &pendingRequest.enableZsl;
3882 }
3883
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003884 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003885 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003886 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003888 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003889
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003890 if (pendingRequest.blob_request) {
3891 //Dump tuning metadata if enabled and available
3892 char prop[PROPERTY_VALUE_MAX];
3893 memset(prop, 0, sizeof(prop));
3894 property_get("persist.camera.dumpmetadata", prop, "0");
3895 int32_t enabled = atoi(prop);
3896 if (enabled && metadata->is_tuning_params_valid) {
3897 dumpMetadataToFile(metadata->tuning_params,
3898 mMetaFrameCount,
3899 enabled,
3900 "Snapshot",
3901 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 }
3903 }
3904
3905 if (!internalPproc) {
3906 LOGD("couldn't find need_metadata for this metadata");
3907 // Return metadata buffer
3908 if (free_and_bufdone_meta_buf) {
3909 mMetadataChannel->bufDone(metadata_buf);
3910 free(metadata_buf);
3911 }
3912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003913
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003914 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 }
3916 }
3917
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003918 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3919
3920 // Try to send out capture result metadata.
3921 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003922 return;
3923
Thierry Strudel3d639192016-09-09 11:52:26 -07003924done_metadata:
3925 for (pendingRequestIterator i = mPendingRequestsList.begin();
3926 i != mPendingRequestsList.end() ;i++) {
3927 i->pipeline_depth++;
3928 }
3929 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3930 unblockRequestIfNecessary();
3931}
3932
3933/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003934 * FUNCTION : handleDepthDataLocked
3935 *
3936 * DESCRIPTION: Handles incoming depth data
3937 *
3938 * PARAMETERS : @depthData : Depth data
3939 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003940 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003941 *
3942 * RETURN :
3943 *
3944 *==========================================================================*/
3945void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003947 uint32_t currentFrameNumber;
3948 buffer_handle_t *depthBuffer;
3949
3950 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003951 return;
3952 }
3953
3954 camera3_stream_buffer_t resultBuffer =
3955 {.acquire_fence = -1,
3956 .release_fence = -1,
3957 .status = CAMERA3_BUFFER_STATUS_OK,
3958 .buffer = nullptr,
3959 .stream = mDepthChannel->getStream()};
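    // Drain depth buffers in frame-number order: buffers for older frames are returned
    // as errors, the buffer matching this frame is populated with the depth data (when
    // valid), and the loop stops once a newer frame is reached.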
Emilian Peev7650c122017-01-19 08:24:33 -08003960 do {
3961 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3962 if (nullptr == depthBuffer) {
3963 break;
3964 }
3965
Emilian Peev7650c122017-01-19 08:24:33 -08003966 resultBuffer.buffer = depthBuffer;
3967 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003968 if (valid) {
3969 int32_t rc = mDepthChannel->populateDepthData(depthData,
3970 frameNumber);
3971 if (NO_ERROR != rc) {
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 } else {
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3975 }
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003977 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003978 }
3979 } else if (currentFrameNumber > frameNumber) {
3980 break;
3981 } else {
3982 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3983 {{currentFrameNumber, mDepthChannel->getStream(),
3984 CAMERA3_MSG_ERROR_BUFFER}}};
3985 orchestrateNotify(&notify_msg);
3986
3987 LOGE("Depth buffer for frame number: %d is missing, "
3988 "returning it with error status!", currentFrameNumber);
3989 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3990 }
3991 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003992 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003993 } while (currentFrameNumber < frameNumber);
3994}
3995
3996/*===========================================================================
3997 * FUNCTION : notifyErrorFoPendingDepthData
3998 *
3999 * DESCRIPTION: Returns error for any pending depth buffers
4000 *
4001 * PARAMETERS : depthCh - depth channel that needs to get flushed
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4007 QCamera3DepthChannel *depthCh) {
4008 uint32_t currentFrameNumber;
4009 buffer_handle_t *depthBuffer;
4010
4011 if (nullptr == depthCh) {
4012 return;
4013 }
4014
4015 camera3_notify_msg_t notify_msg =
4016 {.type = CAMERA3_MSG_ERROR,
4017 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4018 camera3_stream_buffer_t resultBuffer =
4019 {.acquire_fence = -1,
4020 .release_fence = -1,
4021 .buffer = nullptr,
4022 .stream = depthCh->getStream(),
4023 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004024
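    // Flush loop: unmap every outstanding depth buffer, send an ERROR_BUFFER notify
    // for its frame number, and hand the buffer back through the output buffer dispatcher.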
4025 while (nullptr !=
4026 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4027 depthCh->unmapBuffer(currentFrameNumber);
4028
4029 notify_msg.message.error.frame_number = currentFrameNumber;
4030 orchestrateNotify(&notify_msg);
4031
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004032 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004033 }
4034}
4035
4036/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 * FUNCTION : hdrPlusPerfLock
4038 *
4039 * DESCRIPTION: perf lock for HDR+ using custom intent
4040 *
4041 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4042 *
4043 * RETURN : None
4044 *
4045 *==========================================================================*/
4046void QCamera3HardwareInterface::hdrPlusPerfLock(
4047 mm_camera_super_buf_t *metadata_buf)
4048{
4049 if (NULL == metadata_buf) {
4050 LOGE("metadata_buf is NULL");
4051 return;
4052 }
4053 metadata_buffer_t *metadata =
4054 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4055 int32_t *p_frame_number_valid =
4056 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4057 uint32_t *p_frame_number =
4058 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4059
4060 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4061 LOGE("%s: Invalid metadata", __func__);
4062 return;
4063 }
4064
Wei Wang01385482017-08-03 10:49:34 -07004065 //acquire perf lock for 2 secs after the last HDR frame is captured
4066 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4068 if ((p_frame_number != NULL) &&
4069 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004070 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
4072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004073}
4074
4075/*===========================================================================
4076 * FUNCTION : handleInputBufferWithLock
4077 *
4078 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4079 *
4080 * PARAMETERS : @frame_number: frame number of the input buffer
4081 *
4082 * RETURN :
4083 *
4084 *==========================================================================*/
4085void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4086{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004087 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 pendingRequestIterator i = mPendingRequestsList.begin();
4089 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4090 i++;
4091 }
4092 if (i != mPendingRequestsList.end() && i->input_buffer) {
4093 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004094 CameraMetadata settings;
4095 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4096 if(i->settings) {
4097 settings = i->settings;
4098 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4099 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004103 } else {
4104 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 }
4106
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004107 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4108 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4109 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110
4111 camera3_capture_result result;
4112 memset(&result, 0, sizeof(camera3_capture_result));
4113 result.frame_number = frame_number;
4114 result.result = i->settings;
4115 result.input_buffer = i->input_buffer;
4116 result.partial_result = PARTIAL_RESULT_COUNT;
4117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004119 LOGD("Input request metadata and input buffer frame_number = %u",
4120 i->frame_number);
4121 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004122
4123 // Dispatch result metadata that may be just unblocked by this reprocess result.
4124 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 } else {
4126 LOGE("Could not find input request for frame number %d", frame_number);
4127 }
4128}
4129
4130/*===========================================================================
4131 * FUNCTION : handleBufferWithLock
4132 *
4133 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4134 *
4135 * PARAMETERS : @buffer: image buffer for the callback
4136 * @frame_number: frame number of the image buffer
4137 *
4138 * RETURN :
4139 *
4140 *==========================================================================*/
4141void QCamera3HardwareInterface::handleBufferWithLock(
4142 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4143{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004144 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004145
4146 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4148 }
4149
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 /* Nothing to be done during error state */
4151 if ((ERROR == mState) || (DEINIT == mState)) {
4152 return;
4153 }
4154 if (mFlushPerf) {
4155 handleBuffersDuringFlushLock(buffer);
4156 return;
4157 }
4158 //not in flush
4159 // If the frame number doesn't exist in the pending request list,
4160 // directly send the buffer to the frameworks, and update pending buffers map
4161 // Otherwise, book-keep the buffer.
4162 pendingRequestIterator i = mPendingRequestsList.begin();
4163 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4164 i++;
4165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004166
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004167 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004168 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004169 // For a reprocessing request, try to send out result metadata.
4170 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 // Check if this frame was dropped.
4175 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4176 m != mPendingFrameDropList.end(); m++) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4179 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4180 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4181 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4182 frame_number, streamID);
4183 m = mPendingFrameDropList.erase(m);
4184 break;
4185 }
4186 }
4187
Binhao Lin09245482017-08-31 18:25:29 -07004188 // WAR for encoder avtimer timestamp issue
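    // When the AV timer is enabled, video buffers must carry the AV timestamp recorded at
    // request time: if it never arrived, flag the buffer as an error; otherwise store it in
    // the gralloc private handle (SET_VT_TIMESTAMP) so the encoder sees the same clock.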
4189 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4190 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4191 m_bAVTimerEnabled) {
4192 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4193 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4194 if (req->frame_number != frame_number)
4195 continue;
4196 if(req->av_timestamp == 0) {
4197 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4198 }
4199 else {
4200 struct private_handle_t *priv_handle =
4201 (struct private_handle_t *) (*(buffer->buffer));
4202 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4203 }
4204 }
4205 }
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4208 LOGH("result frame_number = %d, buffer = %p",
4209 frame_number, buffer->buffer);
4210
4211 mPendingBuffersMap.removeBuf(buffer->buffer);
4212 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4213
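    // First preview buffer: release the preview-start and open-camera perf locks and
    // switch to the steady-state power hint.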
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004214 if (mPreviewStarted == false) {
4215 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4216 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004217 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4218
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4220 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4221 mPreviewStarted = true;
4222
4223 // Set power hint for preview
4224 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4225 }
4226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004227}
4228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004229void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004230 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004231{
4232 // Find the pending request for this result metadata.
4233 auto requestIter = mPendingRequestsList.begin();
4234 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4235 requestIter++;
4236 }
4237
4238 if (requestIter == mPendingRequestsList.end()) {
4239 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4240 return;
4241 }
4242
4243 // Update the result metadata
4244 requestIter->resultMetadata = resultMetadata;
4245
4246 // Check what type of request this is.
4247 bool liveRequest = false;
4248 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004249 // HDR+ requests don't have partial results.
4250 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 } else if (requestIter->input_buffer != nullptr) {
4252 // Reprocessing request result is the same as settings.
4253 requestIter->resultMetadata = requestIter->settings;
4254 // Reprocessing request doesn't have partial results.
4255 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4256 } else {
4257 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004258 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004259 mPendingLiveRequest--;
4260
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004261 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004262 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004263 // For a live request, send the metadata to HDR+ client.
4264 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4265 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4266 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4267 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 }
4269 }
4270
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004271 // Remove the lens shading map if it's not requested.
4272 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4273 CameraMetadata metadata;
4274 metadata.acquire(resultMetadata);
4275 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4276 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4277 &requestIter->requestedLensShadingMapMode, 1);
4278
4279 requestIter->resultMetadata = metadata.release();
4280 }
4281
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004282 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4283}
4284
4285void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4286 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4288 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 bool readyToSend = true;
4290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004291 // Iterate through the pending requests to send out result metadata that are ready. Also if
4292 // this result metadata belongs to a live request, notify errors for previous live requests
4293 // that don't have result metadata yet.
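    // Illustrative example (assumed frame numbers): with pending requests 10, 11 and 12
    // where only 12 has result metadata, nothing is sent. Once 10's metadata arrives,
    // 10 is dispatched, while 11 and 12 are held back until 11 becomes ready, keeping
    // shutter callbacks and result metadata in frame order.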
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 auto iter = mPendingRequestsList.begin();
4295 while (iter != mPendingRequestsList.end()) {
4296 // Check if current pending request is ready. If it's not ready, the following pending
4297 // requests are also not ready.
4298 if (readyToSend && iter->resultMetadata == nullptr) {
4299 readyToSend = false;
4300 }
4301
4302 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4303
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004304 camera3_capture_result_t result = {};
4305 result.frame_number = iter->frame_number;
4306 result.result = iter->resultMetadata;
4307 result.partial_result = iter->partial_result_cnt;
4308
4309 // If this pending request has result metadata, we may be able to send out the shutter callback
4310 // and result metadata.
4311 if (iter->resultMetadata != nullptr) {
4312 if (!readyToSend) {
4313 // If any of the previous pending request is not ready, this pending request is
4314 // also not ready to send in order to keep shutter callbacks and result metadata
4315 // in order.
4316 iter++;
4317 continue;
4318 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 // If the result metadata belongs to a live request, notify errors for previous pending
4321 // live requests.
4322 mPendingLiveRequest--;
4323
4324 CameraMetadata dummyMetadata;
4325 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4326 result.result = dummyMetadata.release();
4327
4328 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004329
4330 // partial_result should be PARTIAL_RESULT_COUNT in case of
4331 // ERROR_RESULT.
4332 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4333 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004334 } else {
4335 iter++;
4336 continue;
4337 }
4338
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004339 result.output_buffers = nullptr;
4340 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004341 orchestrateResult(&result);
4342
4343 // For reprocessing, result metadata is the same as settings so do not free it here to
4344 // avoid double free.
4345 if (result.result != iter->settings) {
4346 free_camera_metadata((camera_metadata_t *)result.result);
4347 }
4348 iter->resultMetadata = nullptr;
4349 iter = erasePendingRequest(iter);
4350 }
4351
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004352 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004353 for (auto &iter : mPendingRequestsList) {
4354 // Increment pipeline depth for the following pending requests.
4355 if (iter.frame_number > frameNumber) {
4356 iter.pipeline_depth++;
4357 }
4358 }
4359 }
4360
4361 unblockRequestIfNecessary();
4362}
4363
Thierry Strudel3d639192016-09-09 11:52:26 -07004364/*===========================================================================
4365 * FUNCTION : unblockRequestIfNecessary
4366 *
4367 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4368 * that mMutex is held when this function is called.
4369 *
4370 * PARAMETERS :
4371 *
4372 * RETURN :
4373 *
4374 *==========================================================================*/
4375void QCamera3HardwareInterface::unblockRequestIfNecessary()
4376{
4377 // Unblock process_capture_request
4378 pthread_cond_signal(&mRequestCond);
4379}
4380
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004381/*===========================================================================
4382 * FUNCTION : isHdrSnapshotRequest
4383 *
4384 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4385 *
4386 * PARAMETERS : camera3 request structure
4387 *
4388 * RETURN : boolean decision variable
4389 *
4390 *==========================================================================*/
4391bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4392{
4393 if (request == NULL) {
4394 LOGE("Invalid request handle");
4395 assert(0);
4396 return false;
4397 }
4398
4399 if (!mForceHdrSnapshot) {
4400 CameraMetadata frame_settings;
4401 frame_settings = request->settings;
4402
4403 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4404 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4405 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4406 return false;
4407 }
4408 } else {
4409 return false;
4410 }
4411
4412 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4413 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4414 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4415 return false;
4416 }
4417 } else {
4418 return false;
4419 }
4420 }
4421
4422 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4423 if (request->output_buffers[i].stream->format
4424 == HAL_PIXEL_FORMAT_BLOB) {
4425 return true;
4426 }
4427 }
4428
4429 return false;
4430}
4431/*===========================================================================
4432 * FUNCTION : orchestrateRequest
4433 *
4434 * DESCRIPTION: Orchestrates a capture request from camera service
4435 *
4436 * PARAMETERS :
4437 * @request : request from framework to process
4438 *
4439 * RETURN : Error status codes
4440 *
4441 *==========================================================================*/
4442int32_t QCamera3HardwareInterface::orchestrateRequest(
4443 camera3_capture_request_t *request)
4444{
4445
4446 uint32_t originalFrameNumber = request->frame_number;
4447 uint32_t originalOutputCount = request->num_output_buffers;
4448 const camera_metadata_t *original_settings = request->settings;
4449 List<InternalRequest> internallyRequestedStreams;
4450 List<InternalRequest> emptyInternalList;
4451
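    // HDR snapshot orchestration: the single framework request is expanded into a
    // bracketed sequence of internal requests (AE-locked metering and capture frames at
    // the half-step, 0x and 2x exposure compensation values below). Only the request
    // mapped to the original framework frame number reports results back; the purely
    // internal frames are dropped in orchestrateResult()/orchestrateNotify().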
4452 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4453 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4454 uint32_t internalFrameNumber;
4455 CameraMetadata modified_meta;
4456
4457
4458 /* Add Blob channel to list of internally requested streams */
4459 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4460 if (request->output_buffers[i].stream->format
4461 == HAL_PIXEL_FORMAT_BLOB) {
4462 InternalRequest streamRequested;
4463 streamRequested.meteringOnly = 1;
4464 streamRequested.need_metadata = 0;
4465 streamRequested.stream = request->output_buffers[i].stream;
4466 internallyRequestedStreams.push_back(streamRequested);
4467 }
4468 }
4469 request->num_output_buffers = 0;
4470 auto itr = internallyRequestedStreams.begin();
4471
4472 /* Modify setting to set compensation */
4473 modified_meta = request->settings;
4474 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4475 uint8_t aeLock = 1;
4476 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4477 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4478 camera_metadata_t *modified_settings = modified_meta.release();
4479 request->settings = modified_settings;
4480
4481 /* Capture Settling & -2x frame */
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486 request->num_output_buffers = originalOutputCount;
4487 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4488 request->frame_number = internalFrameNumber;
4489 processCaptureRequest(request, emptyInternalList);
4490 request->num_output_buffers = 0;
4491
4492 modified_meta = modified_settings;
4493 expCompensation = 0;
4494 aeLock = 1;
4495 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4496 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4497 modified_settings = modified_meta.release();
4498 request->settings = modified_settings;
4499
4500 /* Capture Settling & 0X frame */
4501
4502 itr = internallyRequestedStreams.begin();
4503 if (itr == internallyRequestedStreams.end()) {
4504 LOGE("Error Internally Requested Stream list is empty");
4505 assert(0);
4506 } else {
4507 itr->need_metadata = 0;
4508 itr->meteringOnly = 1;
4509 }
4510
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528 /* Capture 2X frame*/
4529 modified_meta = modified_settings;
4530 expCompensation = GB_HDR_2X_STEP_EV;
4531 aeLock = 1;
4532 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4533 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4534 modified_settings = modified_meta.release();
4535 request->settings = modified_settings;
4536
4537 itr = internallyRequestedStreams.begin();
4538 if (itr == internallyRequestedStreams.end()) {
4539 ALOGE("Error Internally Requested Stream list is empty");
4540 assert(0);
4541 } else {
4542 itr->need_metadata = 0;
4543 itr->meteringOnly = 1;
4544 }
4545 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4546 request->frame_number = internalFrameNumber;
4547 processCaptureRequest(request, internallyRequestedStreams);
4548
4549 itr = internallyRequestedStreams.begin();
4550 if (itr == internallyRequestedStreams.end()) {
4551 ALOGE("Error Internally Requested Stream list is empty");
4552 assert(0);
4553 } else {
4554 itr->need_metadata = 1;
4555 itr->meteringOnly = 0;
4556 }
4557
4558 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4559 request->frame_number = internalFrameNumber;
4560 processCaptureRequest(request, internallyRequestedStreams);
4561
4562
4563 /* Capture 2X on original streaming config*/
4564 internallyRequestedStreams.clear();
4565
4566 /* Restore original settings pointer */
4567 request->settings = original_settings;
4568 } else {
4569 uint32_t internalFrameNumber;
4570 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4571 request->frame_number = internalFrameNumber;
4572 return processCaptureRequest(request, internallyRequestedStreams);
4573 }
4574
4575 return NO_ERROR;
4576}
4577
4578/*===========================================================================
4579 * FUNCTION : orchestrateResult
4580 *
4581 * DESCRIPTION: Orchestrates a capture result to camera service
4582 *
4583 * PARAMETERS :
4584 * @result : capture result to be sent to the framework
4585 *
4586 * RETURN :
4587 *
4588 *==========================================================================*/
4589void QCamera3HardwareInterface::orchestrateResult(
4590 camera3_capture_result_t *result)
4591{
4592 uint32_t frameworkFrameNumber;
4593 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4594 frameworkFrameNumber);
4595 if (rc != NO_ERROR) {
4596 LOGE("Cannot find translated frameworkFrameNumber");
4597 assert(0);
4598 } else {
4599 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004600 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 } else {
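            // Patch ANDROID_SYNC_FRAME_NUMBER (if present) so it carries the framework's
            // frame number instead of the HAL-internal one used during orchestration.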
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004602 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004603 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4604 camera_metadata_entry_t entry;
4605 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4606 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004607 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004608 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4609 if (ret != OK)
4610 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004611 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004612 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 result->frame_number = frameworkFrameNumber;
4614 mCallbackOps->process_capture_result(mCallbackOps, result);
4615 }
4616 }
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : orchestrateNotify
4621 *
4622 * DESCRIPTION: Orchestrates a notify to camera service
4623 *
4624 * PARAMETERS :
4625 * @notify_msg : notify message to be sent to the framework
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4631{
4632 uint32_t frameworkFrameNumber;
4633 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004634 int32_t rc = NO_ERROR;
4635
4636 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004637 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004638
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004640 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4641 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4642 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004644 LOGE("Cannot find translated frameworkFrameNumber");
4645 assert(0);
4646 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004647 }
4648 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004649
4650 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4651 LOGD("Internal Request drop the notifyCb");
4652 } else {
4653 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4654 mCallbackOps->notify(mCallbackOps, notify_msg);
4655 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004656}
4657
4658/*===========================================================================
4659 * FUNCTION : FrameNumberRegistry
4660 *
4661 * DESCRIPTION: Constructor
4662 *
4663 * PARAMETERS :
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668FrameNumberRegistry::FrameNumberRegistry()
4669{
4670 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : ~FrameNumberRegistry
4675 *
4676 * DESCRIPTION: Destructor
4677 *
4678 * PARAMETERS :
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683FrameNumberRegistry::~FrameNumberRegistry()
4684{
4685}
4686
4687/*===========================================================================
4688 * FUNCTION : purgeOldEntriesLocked
4689 *
4690 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4691 *
4692 * PARAMETERS :
4693 *
4694 * RETURN : NONE
4695 *
4696 *==========================================================================*/
4697void FrameNumberRegistry::purgeOldEntriesLocked()
4698{
4699 while (_register.begin() != _register.end()) {
4700 auto itr = _register.begin();
4701 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4702 _register.erase(itr);
4703 } else {
4704 return;
4705 }
4706 }
4707}
4708
4709/*===========================================================================
4710 * FUNCTION : allocStoreInternalFrameNumber
4711 *
4712 * DESCRIPTION: Method to note down a framework request and associate a new
4713 * internal request number against it
4714 *
4715 * PARAMETERS :
4716 * @frameworkFrameNumber: Identifier given by the framework
4717 * @internalFrameNumber : Output parameter which will hold the newly generated
4718 * internal frame number
4719 *
4720 * RETURN : Error code
4721 *
4722 *==========================================================================*/
4723int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4724 uint32_t &internalFrameNumber)
4725{
4726 Mutex::Autolock lock(mRegistryLock);
4727 internalFrameNumber = _nextFreeInternalNumber++;
4728 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4729 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4730 purgeOldEntriesLocked();
4731 return NO_ERROR;
4732}
4733
4734/*===========================================================================
4735 * FUNCTION : generateStoreInternalFrameNumber
4736 *
4737 * DESCRIPTION: Method to associate a new internal request number independent
4738 * of any association with framework requests
4739 *
4740 * PARAMETERS :
4741 * @internalFrameNumber: Output parameter which will hold the newly generated internal frame number
4742 *
4743 *
4744 * RETURN : Error code
4745 *
4746 *==========================================================================*/
4747int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4748{
4749 Mutex::Autolock lock(mRegistryLock);
4750 internalFrameNumber = _nextFreeInternalNumber++;
4751 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4752 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4753 purgeOldEntriesLocked();
4754 return NO_ERROR;
4755}
4756
4757/*===========================================================================
4758 * FUNCTION : getFrameworkFrameNumber
4759 *
4760 * DESCRIPTION: Method to query the framework frame number given an internal one
4761 *
4762 * PARAMETERS :
4763 * @internalFrameNumber: Internal reference
4764 * @frameworkFrameNumber: Output parameter holding the framework frame number
4765 *
4766 * RETURN : Error code
4767 *
4768 *==========================================================================*/
4769int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4770 uint32_t &frameworkFrameNumber)
4771{
4772 Mutex::Autolock lock(mRegistryLock);
4773 auto itr = _register.find(internalFrameNumber);
4774 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004775 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 return -ENOENT;
4777 }
4778
4779 frameworkFrameNumber = itr->second;
4780 purgeOldEntriesLocked();
4781 return NO_ERROR;
4782}
Thierry Strudel3d639192016-09-09 11:52:26 -07004783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004785 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4786 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 if (config == nullptr) {
4788 LOGE("%s: config is null", __FUNCTION__);
4789 return BAD_VALUE;
4790 }
4791
4792 if (channel == nullptr) {
4793 LOGE("%s: channel is null", __FUNCTION__);
4794 return BAD_VALUE;
4795 }
4796
4797 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4798 if (stream == nullptr) {
4799 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4800 return NAME_NOT_FOUND;
4801 }
4802
4803 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4804 if (streamInfo == nullptr) {
4805 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4806 return NAME_NOT_FOUND;
4807 }
4808
4809 config->id = pbStreamId;
4810 config->image.width = streamInfo->dim.width;
4811 config->image.height = streamInfo->dim.height;
4812 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004813
4814 int bytesPerPixel = 0;
4815
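    // Map the backend stream format to the corresponding gralloc pixel format for the
    // HDR+ (pbcamera) stream configuration. bytesPerPixel refers to the luma plane of
    // the semi-planar YUV formats and is used to convert the plane stride to bytes.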
4816 switch (streamInfo->fmt) {
4817 case CAM_FORMAT_YUV_420_NV21:
4818 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4819 bytesPerPixel = 1;
4820 break;
4821 case CAM_FORMAT_YUV_420_NV12:
4822 case CAM_FORMAT_YUV_420_NV12_VENUS:
4823 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4824 bytesPerPixel = 1;
4825 break;
4826 default:
4827 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4828 return BAD_VALUE;
4829 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004830
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004831 uint32_t totalPlaneSize = 0;
4832
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833 // Fill plane information.
4834 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4835 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004836 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004837 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4838 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004839
4840 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004841 }
4842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004843 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 return OK;
4845}
4846
Thierry Strudel3d639192016-09-09 11:52:26 -07004847/*===========================================================================
4848 * FUNCTION : processCaptureRequest
4849 *
4850 * DESCRIPTION: process a capture request from camera service
4851 *
4852 * PARAMETERS :
4853 * @request : request from framework to process
4854 *
4855 * RETURN :
4856 *
4857 *==========================================================================*/
4858int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004859 camera3_capture_request_t *request,
4860 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004861{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int rc = NO_ERROR;
4864 int32_t request_id;
4865 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 bool isVidBufRequested = false;
4867 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004868 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870 pthread_mutex_lock(&mMutex);
4871
4872 // Validate current state
4873 switch (mState) {
4874 case CONFIGURED:
4875 case STARTED:
4876 /* valid state */
4877 break;
4878
4879 case ERROR:
4880 pthread_mutex_unlock(&mMutex);
4881 handleCameraDeviceError();
4882 return -ENODEV;
4883
4884 default:
4885 LOGE("Invalid state %d", mState);
4886 pthread_mutex_unlock(&mMutex);
4887 return -ENODEV;
4888 }
4889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004890 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 if (rc != NO_ERROR) {
4892 LOGE("incoming request is not valid");
4893 pthread_mutex_unlock(&mMutex);
4894 return rc;
4895 }
4896
4897 meta = request->settings;
4898
4899 // For first capture request, send capture intent, and
4900 // stream on all streams
4901 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004902 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 // send an unconfigure to the backend so that the isp
4904 // resources are deallocated
4905 if (!mFirstConfiguration) {
4906 cam_stream_size_info_t stream_config_info;
4907 int32_t hal_version = CAM_HAL_V3;
4908 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4909 stream_config_info.buffer_info.min_buffers =
4910 MIN_INFLIGHT_REQUESTS;
4911 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004912 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004913 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 clear_metadata_buffer(mParameters);
4915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4916 CAM_INTF_PARM_HAL_VERSION, hal_version);
4917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_META_STREAM_INFO, stream_config_info);
4919 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4920 mParameters);
4921 if (rc < 0) {
4922 LOGE("set_parms for unconfigure failed");
4923 pthread_mutex_unlock(&mMutex);
4924 return rc;
4925 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004932 property_get("persist.camera.is_type", is_type_value, "4");
4933 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4934 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4935 property_get("persist.camera.is_type_preview", is_type_value, "4");
4936 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004938
4939 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4940 int32_t hal_version = CAM_HAL_V3;
4941 uint8_t captureIntent =
4942 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4943 mCaptureIntent = captureIntent;
4944 clear_metadata_buffer(mParameters);
4945 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4946 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4947 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004948 if (mFirstConfiguration) {
4949 // configure instant AEC
4950 // Instant AEC is a session based parameter and it is needed only
4951 // once per complete session after open camera.
4952 // i.e. This is set only once for the first capture request, after open camera.
4953 setInstantAEC(meta);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 uint8_t fwkVideoStabMode=0;
4956 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4957 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4958 }
4959
Xue Tuecac74e2017-04-17 13:58:15 -07004960 // If the EIS setprop is enabled, turn EIS on only for video/preview streams.
4961 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004962 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 int32_t vsMode;
4964 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4966 rc = BAD_VALUE;
4967 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968 LOGD("setEis %d", setEis);
4969 bool eis3Supported = false;
4970 size_t count = IS_TYPE_MAX;
4971 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4972 for (size_t i = 0; i < count; i++) {
4973 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4974 eis3Supported = true;
4975 break;
4976 }
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978
4979 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4982 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4984 is_type = isTypePreview;
4985 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4986 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4987 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004989 } else {
4990 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = IS_TYPE_NONE;
4994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004996 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4998 }
4999 }
5000
5001 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5002 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5003
Thierry Strudel54dc9782017-02-15 12:12:10 -08005004 //Disable tintless only if the property is set to 0
5005 memset(prop, 0, sizeof(prop));
5006 property_get("persist.camera.tintless.enable", prop, "1");
5007 int32_t tintless_value = atoi(prop);
5008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5010 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 //Disable CDS for HFR mode or if DIS/EIS is on.
5013 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5014 //after every configure_stream
5015 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5016 (m_bIsVideo)) {
5017 int32_t cds = CAM_CDS_MODE_OFF;
5018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5019 CAM_INTF_PARM_CDS_MODE, cds))
5020 LOGE("Failed to disable CDS for HFR mode");
5021
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
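        // The AV timer can be requested either through the debug setprop or via the
        // QCAMERA3_USE_AV_TIMER vendor tag; remember it in m_bAVTimerEnabled so video
        // buffer timestamps can be patched in handleBufferWithLock().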
5024 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5025 uint8_t* use_av_timer = NULL;
5026
5027 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005028 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005030 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005031 }
5032 else{
5033 use_av_timer =
5034 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005035 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005036 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005037 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039 }
5040
5041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5042 rc = BAD_VALUE;
5043 }
5044 }
5045
Thierry Strudel3d639192016-09-09 11:52:26 -07005046 setMobicat();
5047
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005048 uint8_t nrMode = 0;
5049 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5050 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5051 }
5052
Thierry Strudel3d639192016-09-09 11:52:26 -07005053 /* Set fps and hfr mode while sending meta stream info so that sensor
5054 * can configure appropriate streaming mode */
5055 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5057 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5059 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 if (rc == NO_ERROR) {
5061 int32_t max_fps =
5062 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005063 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5065 }
5066 /* For HFR, more buffers are dequeued upfront to improve the performance */
5067 if (mBatchSize) {
5068 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5069 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5070 }
5071 }
5072 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 LOGE("setHalFpsRange failed");
5074 }
5075 }
5076 if (meta.exists(ANDROID_CONTROL_MODE)) {
5077 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5078 rc = extractSceneMode(meta, metaMode, mParameters);
5079 if (rc != NO_ERROR) {
5080 LOGE("extractSceneMode failed");
5081 }
5082 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005083 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
Thierry Strudel04e026f2016-10-10 11:27:36 -07005085 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5086 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5087 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5088 rc = setVideoHdrMode(mParameters, vhdr);
5089 if (rc != NO_ERROR) {
5090 LOGE("setVideoHDR is failed");
5091 }
5092 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005093
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005094 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005095 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005096 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005097 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5099 sensorModeFullFov)) {
5100 rc = BAD_VALUE;
5101 }
5102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 //TODO: validate the arguments, HSV scenemode should have only the
5104 //advertised fps ranges
5105
5106 /* Set the capture intent, HAL version, tintless, stream info,
5107 * and DIS enable parameters in the backend */
5108 LOGD("set_parms META_STREAM_INFO " );
5109 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005110 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5111 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 mStreamConfigInfo.type[i],
5113 mStreamConfigInfo.stream_sizes[i].width,
5114 mStreamConfigInfo.stream_sizes[i].height,
5115 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 mStreamConfigInfo.format[i],
5117 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5121 mParameters);
5122 if (rc < 0) {
5123 LOGE("set_parms failed for hal version, stream info");
5124 }
5125
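    // After the meta stream info is set, query the sensor mode selected by the
    // backend so that crop regions given in active-array coordinates can be
    // remapped to the selected sensor mode's active array below.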
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005126 cam_sensor_mode_info_t sensorModeInfo = {};
5127 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (rc != NO_ERROR) {
5129        LOGE("Failed to get sensor mode info");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133
5134 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5135 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005136 sensorModeInfo.active_array_size.width,
5137 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005138
5139    /* Set batch mode before initializing channels. Since registerBuffer
5140     * internally initializes some of the channels, it is better to set batch
5141     * mode even before the first registerBuffer call */
5142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5143 it != mStreamInfo.end(); it++) {
5144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5145 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5146 && mBatchSize) {
5147 rc = channel->setBatchSize(mBatchSize);
5148 //Disable per frame map unmap for HFR/batchmode case
5149 rc |= channel->setPerFrameMapUnmap(false);
5150 if (NO_ERROR != rc) {
5151 LOGE("Channel init failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156 }
5157
5158 //First initialize all streams
5159 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5160 it != mStreamInfo.end(); it++) {
5161 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005162
5163 /* Initial value of NR mode is needed before stream on */
5164 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5166 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005167 setEis) {
5168 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5169 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5170 is_type = mStreamConfigInfo.is_type[i];
5171 break;
5172 }
5173 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005175 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 rc = channel->initialize(IS_TYPE_NONE);
5177 }
5178 if (NO_ERROR != rc) {
5179 LOGE("Channel initialization failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183 }
5184
5185 if (mRawDumpChannel) {
5186 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5187 if (rc != NO_ERROR) {
5188 LOGE("Error: Raw Dump Channel init failed");
5189 pthread_mutex_unlock(&mMutex);
5190 goto error_exit;
5191 }
5192 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005193 if (mHdrPlusRawSrcChannel) {
5194 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5195 if (rc != NO_ERROR) {
5196 LOGE("Error: HDR+ RAW Source Channel init failed");
5197 pthread_mutex_unlock(&mMutex);
5198 goto error_exit;
5199 }
5200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 if (mSupportChannel) {
5202 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5203 if (rc < 0) {
5204 LOGE("Support channel initialization failed");
5205 pthread_mutex_unlock(&mMutex);
5206 goto error_exit;
5207 }
5208 }
5209 if (mAnalysisChannel) {
5210 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5211 if (rc < 0) {
5212 LOGE("Analysis channel initialization failed");
5213 pthread_mutex_unlock(&mMutex);
5214 goto error_exit;
5215 }
5216 }
5217 if (mDummyBatchChannel) {
5218 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5219 if (rc < 0) {
5220 LOGE("mDummyBatchChannel setBatchSize failed");
5221 pthread_mutex_unlock(&mMutex);
5222 goto error_exit;
5223 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 if (rc < 0) {
5226 LOGE("mDummyBatchChannel initialization failed");
5227 pthread_mutex_unlock(&mMutex);
5228 goto error_exit;
5229 }
5230 }
5231
5232 // Set bundle info
5233 rc = setBundleInfo();
5234 if (rc < 0) {
5235 LOGE("setBundleInfo failed %d", rc);
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239
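    // Dual camera link setup: the QCAMERA3_DUALCAM_LINK_* vendor tags below
    // indicate whether this camera is linked to another sensor, whether it acts
    // as the main camera, and the id of the related camera.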
5240 //update settings from app here
5241 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5242 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5243 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5244 }
5245 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5246 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5247 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5248 }
5249 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5250 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5251 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5252
5253 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5254 (mLinkedCameraId != mCameraId) ) {
5255 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5256 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005257 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 goto error_exit;
5259 }
5260 }
5261
5262 // add bundle related cameras
5263 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5264 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005265 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5266 &m_pDualCamCmdPtr->bundle_info;
5267 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268 if (mIsDeviceLinked)
5269 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5270 else
5271 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5272
5273 pthread_mutex_lock(&gCamLock);
5274
5275 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5276 LOGE("Dualcam: Invalid Session Id ");
5277 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005278 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 goto error_exit;
5280 }
5281
5282 if (mIsMainCamera == 1) {
5283 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5284 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005285 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005286 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 // related session id should be session id of linked session
5288 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5289 } else {
5290 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5291 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005292 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005293 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005296 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 pthread_mutex_unlock(&gCamLock);
5298
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005299 rc = mCameraHandle->ops->set_dual_cam_cmd(
5300 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 if (rc < 0) {
5302 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005303 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 goto error_exit;
5305 }
5306 }
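    // Configuration-time setup succeeded; skip the error path below, which
    // releases the start-preview perf lock and returns the error code.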
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto no_error;
5308error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 return rc;
5311no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 mWokenUpByDaemon = false;
5313 mPendingLiveRequest = 0;
5314 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 }
5316
5317 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319
5320 if (mFlushPerf) {
5321 //we cannot accept any requests during flush
5322 LOGE("process_capture_request cannot proceed during flush");
5323 pthread_mutex_unlock(&mMutex);
5324 return NO_ERROR; //should return an error
5325 }
5326
5327 if (meta.exists(ANDROID_REQUEST_ID)) {
5328 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5329 mCurrentRequestId = request_id;
5330 LOGD("Received request with id: %d", request_id);
5331 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5332        LOGE("Unable to find request id field, "
5333                "& no previous id available");
5334 pthread_mutex_unlock(&mMutex);
5335 return NAME_NOT_FOUND;
5336 } else {
5337 LOGD("Re-using old request id");
5338 request_id = mCurrentRequestId;
5339 }
5340
5341 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5342 request->num_output_buffers,
5343 request->input_buffer,
5344 frameNumber);
5345 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005348 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 uint32_t snapshotStreamId = 0;
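    // Walk the output buffers: wait on any acquire fences, flag blob (JPEG) and
    // depth requests, and record the stream ID of each non-depth buffer in
    // streamsArray so the backend knows which streams need a buffer this frame.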
5350 for (size_t i = 0; i < request->num_output_buffers; i++) {
5351 const camera3_stream_buffer_t& output = request->output_buffers[i];
5352 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5353
Emilian Peev7650c122017-01-19 08:24:33 -08005354 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5355 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005356            //FIXME??: Call function to store a local copy of the JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 blob_request = 1;
5358 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5359 }
5360
5361 if (output.acquire_fence != -1) {
5362 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5363 close(output.acquire_fence);
5364 if (rc != OK) {
5365 LOGE("sync wait failed %d", rc);
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 }
5370
Emilian Peev0f3c3162017-03-15 12:57:46 +00005371 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5372 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005373 depthRequestPresent = true;
5374 continue;
5375 }
5376
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005379
5380 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5381 isVidBufRequested = true;
5382 }
5383 }
5384
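    // Streams requested internally by the HAL (e.g. metering-only captures) are
    // appended to streamsArray as well so the backend services them this frame.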
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005385    //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5386 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5387 itr++) {
5388 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5389 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5390 channel->getStreamID(channel->getStreamTypeMask());
5391
5392 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5393 isVidBufRequested = true;
5394 }
5395 }
5396
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005398 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005399 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 }
5401 if (blob_request && mRawDumpChannel) {
5402 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005405 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 }
5407
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005408 {
5409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5410 // Request a RAW buffer if
5411 // 1. mHdrPlusRawSrcChannel is valid.
5412        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5413 // 3. There is no pending HDR+ request.
5414 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5415 mHdrPlusPendingRequests.size() == 0) {
5416 streamsArray.stream_request[streamsArray.num_streams].streamID =
5417 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5418 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5419 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005420 }
5421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005422 //extract capture intent
5423 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5424 mCaptureIntent =
5425 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5426 }
5427
5428 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5429 mCacMode =
5430 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5431 }
5432
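    // Track the lens shading map mode requested by the framework; requests that
    // omit the key inherit the most recently requested mode.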
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005433 uint8_t requestedLensShadingMapMode;
5434 // Get the shading map mode.
5435 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5436 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5437 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5438 } else {
5439 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5440 }
5441
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005442 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005443 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005444
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005445 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005446 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005447 // If this request has a still capture intent, try to submit an HDR+ request.
5448 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5449 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5450 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5451 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 }
5453
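    // For an HDR+ request only the frame parameters are set here; the capture
    // itself is serviced through the HDR+ client instead of the regular channel
    // requests issued further below for non-HDR+ requests.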
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005454 if (hdrPlusRequest) {
5455 // For a HDR+ request, just set the frame parameters.
5456 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5457 if (rc < 0) {
5458 LOGE("fail to set frame parameters");
5459 pthread_mutex_unlock(&mMutex);
5460 return rc;
5461 }
5462 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 /* Parse the settings:
5464 * - For every request in NORMAL MODE
5465 * - For every request in HFR mode during preview only case
5466 * - For first request of every batch in HFR mode during video
5467 * recording. In batchmode the same settings except frame number is
5468 * repeated in each request of the batch.
5469 */
5470 if (!mBatchSize ||
5471 (mBatchSize && !isVidBufRequested) ||
5472 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005473 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (rc < 0) {
5475 LOGE("fail to set frame parameters");
5476 pthread_mutex_unlock(&mMutex);
5477 return rc;
5478 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005479
5480 {
5481 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5482 // will be reported in result metadata.
5483 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5484 if (mHdrPlusModeEnabled) {
5485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5487 }
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 }
5490        /* For batch mode HFR, setFrameParameters is not called for every
5491         * request; only the frame number of the latest request is parsed.
5492         * Keep track of the first and last frame numbers in a batch so that
5493         * metadata for all frame numbers of the batch can be duplicated in
5494         * handleBatchMetadata */
5495 if (mBatchSize) {
5496 if (!mToBeQueuedVidBufs) {
5497 //start of the batch
5498 mFirstFrameNumberInBatch = request->frame_number;
5499 }
5500 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5501 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5502 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005503 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 return BAD_VALUE;
5505 }
5506 }
5507 if (mNeedSensorRestart) {
5508 /* Unlock the mutex as restartSensor waits on the channels to be
5509 * stopped, which in turn calls stream callback functions -
5510 * handleBufferWithLock and handleMetadataWithLock */
5511 pthread_mutex_unlock(&mMutex);
5512 rc = dynamicUpdateMetaStreamInfo();
5513 if (rc != NO_ERROR) {
5514 LOGE("Restarting the sensor failed");
5515 return BAD_VALUE;
5516 }
5517 mNeedSensorRestart = false;
5518 pthread_mutex_lock(&mMutex);
5519 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005520 if(mResetInstantAEC) {
5521 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5522 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5523 mResetInstantAEC = false;
5524 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005525 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 if (request->input_buffer->acquire_fence != -1) {
5527 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5528 close(request->input_buffer->acquire_fence);
5529 if (rc != OK) {
5530 LOGE("input buffer sync wait failed %d", rc);
5531 pthread_mutex_unlock(&mMutex);
5532 return rc;
5533 }
5534 }
5535 }
5536
5537 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5538 mLastCustIntentFrmNum = frameNumber;
5539 }
5540 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 pendingRequestIterator latestRequest;
5543 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005544 pendingRequest.num_buffers = depthRequestPresent ?
5545 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 pendingRequest.request_id = request_id;
5547 pendingRequest.blob_request = blob_request;
5548 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005549 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 if (request->input_buffer) {
5551 pendingRequest.input_buffer =
5552 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5553 *(pendingRequest.input_buffer) = *(request->input_buffer);
5554 pInputBuffer = pendingRequest.input_buffer;
5555 } else {
5556 pendingRequest.input_buffer = NULL;
5557 pInputBuffer = NULL;
5558 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005559 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005560
5561 pendingRequest.pipeline_depth = 0;
5562 pendingRequest.partial_result_cnt = 0;
5563 extractJpegMetadata(mCurJpegMeta, request);
5564 pendingRequest.jpegMetadata = mCurJpegMeta;
5565 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005567 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005568 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005569 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5570 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005571
Samuel Ha68ba5172016-12-15 18:41:12 -08005572 /* DevCamDebug metadata processCaptureRequest */
5573 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5574 mDevCamDebugMetaEnable =
5575 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5576 }
5577 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5578 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005579
5580 //extract CAC info
5581 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5582 mCacMode =
5583 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5584 }
5585 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005587 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5588 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005589
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005590 // extract enableZsl info
5591 if (gExposeEnableZslKey) {
5592 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5593 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5594 mZslEnabled = pendingRequest.enableZsl;
5595 } else {
5596 pendingRequest.enableZsl = mZslEnabled;
5597 }
5598 }
5599
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 PendingBuffersInRequest bufsForCurRequest;
5601 bufsForCurRequest.frame_number = frameNumber;
5602 // Mark current timestamp for the new request
5603 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005604 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005606
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005607 if (hdrPlusRequest) {
5608 // Save settings for this request.
5609 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5610 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5611
5612 // Add to pending HDR+ request queue.
5613 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5614 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5615
5616 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5617 }
5618
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005620 if ((request->output_buffers[i].stream->data_space ==
5621 HAL_DATASPACE_DEPTH) &&
5622 (HAL_PIXEL_FORMAT_BLOB ==
5623 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005624 continue;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 RequestedBufferInfo requestedBuf;
5627 memset(&requestedBuf, 0, sizeof(requestedBuf));
5628 requestedBuf.stream = request->output_buffers[i].stream;
5629 requestedBuf.buffer = NULL;
5630 pendingRequest.buffers.push_back(requestedBuf);
5631
5632 // Add to buffer handle the pending buffers list
5633 PendingBufferInfo bufferInfo;
5634 bufferInfo.buffer = request->output_buffers[i].buffer;
5635 bufferInfo.stream = request->output_buffers[i].stream;
5636 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5637 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5638 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5639 frameNumber, bufferInfo.buffer,
5640 channel->getStreamTypeMask(), bufferInfo.stream->format);
5641 }
5642 // Add this request packet into mPendingBuffersMap
5643 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5644 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5645 mPendingBuffersMap.get_num_overall_buffers());
5646
5647 latestRequest = mPendingRequestsList.insert(
5648 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005649
5650 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5651 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005652 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
5654 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5655 }
5656
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 if(mFlush) {
5658 LOGI("mFlush is true");
5659 pthread_mutex_unlock(&mMutex);
5660 return NO_ERROR;
5661 }
5662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5664 // channel.
5665 if (!hdrPlusRequest) {
5666 int indexUsed;
5667 // Notify metadata channel we receive a request
5668 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005670 if(request->input_buffer != NULL){
5671 LOGD("Input request, frame_number %d", frameNumber);
5672 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5673 if (NO_ERROR != rc) {
5674 LOGE("fail to set reproc parameters");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 }
5679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 // Call request on other streams
5681 uint32_t streams_need_metadata = 0;
5682 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5683 for (size_t i = 0; i < request->num_output_buffers; i++) {
5684 const camera3_stream_buffer_t& output = request->output_buffers[i];
5685 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5686
5687 if (channel == NULL) {
5688 LOGW("invalid channel pointer for stream");
5689 continue;
5690 }
5691
5692 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5693 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5694 output.buffer, request->input_buffer, frameNumber);
5695 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005696 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5698 if (rc < 0) {
5699 LOGE("Fail to request on picture channel");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005703 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005704 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5705 assert(NULL != mDepthChannel);
5706 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707
Emilian Peev7650c122017-01-19 08:24:33 -08005708 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5709 if (rc < 0) {
5710 LOGE("Fail to map on depth buffer");
5711 pthread_mutex_unlock(&mMutex);
5712 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005714 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005715 } else {
5716 LOGD("snapshot request with buffer %p, frame_number %d",
5717 output.buffer, frameNumber);
5718 if (!request->settings) {
5719 rc = channel->request(output.buffer, frameNumber,
5720 NULL, mPrevParameters, indexUsed);
5721 } else {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mParameters, indexUsed);
5724 }
5725 if (rc < 0) {
5726 LOGE("Fail to request on picture channel");
5727 pthread_mutex_unlock(&mMutex);
5728 return rc;
5729 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730
Emilian Peev7650c122017-01-19 08:24:33 -08005731 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5732 uint32_t j = 0;
5733 for (j = 0; j < streamsArray.num_streams; j++) {
5734 if (streamsArray.stream_request[j].streamID == streamId) {
5735 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5736 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5737 else
5738 streamsArray.stream_request[j].buf_index = indexUsed;
5739 break;
5740 }
5741 }
5742 if (j == streamsArray.num_streams) {
5743 LOGE("Did not find matching stream to update index");
5744 assert(0);
5745 }
5746
5747 pendingBufferIter->need_metadata = true;
5748 streams_need_metadata++;
5749 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5752 bool needMetadata = false;
5753 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5754 rc = yuvChannel->request(output.buffer, frameNumber,
5755 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5756 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005757 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005758 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005759 pthread_mutex_unlock(&mMutex);
5760 return rc;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762
5763 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5764 uint32_t j = 0;
5765 for (j = 0; j < streamsArray.num_streams; j++) {
5766 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5768 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5769 else
5770 streamsArray.stream_request[j].buf_index = indexUsed;
5771 break;
5772 }
5773 }
5774 if (j == streamsArray.num_streams) {
5775 LOGE("Did not find matching stream to update index");
5776 assert(0);
5777 }
5778
5779 pendingBufferIter->need_metadata = needMetadata;
5780 if (needMetadata)
5781 streams_need_metadata += 1;
5782 LOGD("calling YUV channel request, need_metadata is %d",
5783 needMetadata);
5784 } else {
5785 LOGD("request with buffer %p, frame_number %d",
5786 output.buffer, frameNumber);
5787
5788 rc = channel->request(output.buffer, frameNumber, indexUsed);
5789
5790 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5791 uint32_t j = 0;
5792 for (j = 0; j < streamsArray.num_streams; j++) {
5793 if (streamsArray.stream_request[j].streamID == streamId) {
5794 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5795 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5796 else
5797 streamsArray.stream_request[j].buf_index = indexUsed;
5798 break;
5799 }
5800 }
5801 if (j == streamsArray.num_streams) {
5802 LOGE("Did not find matching stream to update index");
5803 assert(0);
5804 }
5805
5806 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5807 && mBatchSize) {
5808 mToBeQueuedVidBufs++;
5809 if (mToBeQueuedVidBufs == mBatchSize) {
5810 channel->queueBatchBuf();
5811 }
5812 }
5813 if (rc < 0) {
5814 LOGE("request failed");
5815 pthread_mutex_unlock(&mMutex);
5816 return rc;
5817 }
5818 }
5819 pendingBufferIter++;
5820 }
5821
5822 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5823 itr++) {
5824 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5825
5826 if (channel == NULL) {
5827 LOGE("invalid channel pointer for stream");
5828 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005829 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005830 return BAD_VALUE;
5831 }
5832
5833 InternalRequest requestedStream;
5834 requestedStream = (*itr);
5835
5836
5837 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5838 LOGD("snapshot request internally input buffer %p, frame_number %d",
5839 request->input_buffer, frameNumber);
5840 if(request->input_buffer != NULL){
5841 rc = channel->request(NULL, frameNumber,
5842 pInputBuffer, &mReprocMeta, indexUsed, true,
5843 requestedStream.meteringOnly);
5844 if (rc < 0) {
5845 LOGE("Fail to request on picture channel");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
5849 } else {
5850 LOGD("snapshot request with frame_number %d", frameNumber);
5851 if (!request->settings) {
5852 rc = channel->request(NULL, frameNumber,
5853 NULL, mPrevParameters, indexUsed, true,
5854 requestedStream.meteringOnly);
5855 } else {
5856 rc = channel->request(NULL, frameNumber,
5857 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5858 }
5859 if (rc < 0) {
5860 LOGE("Fail to request on picture channel");
5861 pthread_mutex_unlock(&mMutex);
5862 return rc;
5863 }
5864
5865 if ((*itr).meteringOnly != 1) {
5866 requestedStream.need_metadata = 1;
5867 streams_need_metadata++;
5868 }
5869 }
5870
5871 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5872 uint32_t j = 0;
5873 for (j = 0; j < streamsArray.num_streams; j++) {
5874 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005875 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5876 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5877 else
5878 streamsArray.stream_request[j].buf_index = indexUsed;
5879 break;
5880 }
5881 }
5882 if (j == streamsArray.num_streams) {
5883 LOGE("Did not find matching stream to update index");
5884 assert(0);
5885 }
5886
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005887 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005888 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005889 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005890 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005894 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 //If 2 streams have need_metadata set to true, fail the request, unless
5897 //we copy/reference count the metadata buffer
5898 if (streams_need_metadata > 1) {
5899            LOGE("not supporting a request in which two streams require"
5900                    " 2 HAL metadata buffers for reprocessing");
5901 pthread_mutex_unlock(&mMutex);
5902 return -EINVAL;
5903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005904
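    // PDAF data handling: with a depth channel present the default is to skip
    // PD data, switching to enabled only when the request (or the previously
    // cached mode) asks for it; without a depth channel it stays disabled.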
Emilian Peev656e4fa2017-06-02 16:47:04 +01005905 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5906 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5907 if (depthRequestPresent && mDepthChannel) {
5908 if (request->settings) {
5909 camera_metadata_ro_entry entry;
5910 if (find_camera_metadata_ro_entry(request->settings,
5911 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5912 if (entry.data.u8[0]) {
5913 pdafEnable = CAM_PD_DATA_ENABLED;
5914 } else {
5915 pdafEnable = CAM_PD_DATA_SKIP;
5916 }
5917 mDepthCloudMode = pdafEnable;
5918 } else {
5919 pdafEnable = mDepthCloudMode;
5920 }
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 }
5925
Emilian Peev7650c122017-01-19 08:24:33 -08005926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5927 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5928 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5929 pthread_mutex_unlock(&mMutex);
5930 return BAD_VALUE;
5931 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005932
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005933 if (request->input_buffer == NULL) {
5934 /* Set the parameters to backend:
5935 * - For every request in NORMAL MODE
5936 * - For every request in HFR mode during preview only case
5937 * - Once every batch in HFR mode during video recording
5938 */
5939 if (!mBatchSize ||
5940 (mBatchSize && !isVidBufRequested) ||
5941 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5942 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5943 mBatchSize, isVidBufRequested,
5944 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005945
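            // In HFR batch mode, accumulate the unique stream IDs requested
            // across the whole batch so that the single set_parms issued per
            // batch covers every stream that queued a buffer.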
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5947 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5948 uint32_t m = 0;
5949 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5950 if (streamsArray.stream_request[k].streamID ==
5951 mBatchedStreamsArray.stream_request[m].streamID)
5952 break;
5953 }
5954 if (m == mBatchedStreamsArray.num_streams) {
5955 mBatchedStreamsArray.stream_request\
5956 [mBatchedStreamsArray.num_streams].streamID =
5957 streamsArray.stream_request[k].streamID;
5958 mBatchedStreamsArray.stream_request\
5959 [mBatchedStreamsArray.num_streams].buf_index =
5960 streamsArray.stream_request[k].buf_index;
5961 mBatchedStreamsArray.num_streams =
5962 mBatchedStreamsArray.num_streams + 1;
5963 }
5964 }
5965 streamsArray = mBatchedStreamsArray;
5966 }
5967 /* Update stream id of all the requested buffers */
5968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5969 streamsArray)) {
5970 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005971 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005972 return BAD_VALUE;
5973 }
5974
5975 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5976 mParameters);
5977 if (rc < 0) {
5978 LOGE("set_parms failed");
5979 }
5980 /* reset to zero coz, the batch is queued */
5981            /* reset to zero because the batch is queued */
5982 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5983 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5984 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005985 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5986 uint32_t m = 0;
5987 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5988 if (streamsArray.stream_request[k].streamID ==
5989 mBatchedStreamsArray.stream_request[m].streamID)
5990 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005991 }
5992 if (m == mBatchedStreamsArray.num_streams) {
5993 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5994 streamID = streamsArray.stream_request[k].streamID;
5995 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5996 buf_index = streamsArray.stream_request[k].buf_index;
5997 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005999 }
6000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006001 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006002
6003 // Start all streams after the first setting is sent, so that the
6004 // setting can be applied sooner: (0 + apply_delay)th frame.
6005 if (mState == CONFIGURED && mChannelHandle) {
6006 //Then start them.
6007 LOGH("Start META Channel");
6008 rc = mMetadataChannel->start();
6009 if (rc < 0) {
6010 LOGE("META channel start failed");
6011 pthread_mutex_unlock(&mMutex);
6012 return rc;
6013 }
6014
6015 if (mAnalysisChannel) {
6016 rc = mAnalysisChannel->start();
6017 if (rc < 0) {
6018 LOGE("Analysis channel start failed");
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
6025 if (mSupportChannel) {
6026 rc = mSupportChannel->start();
6027 if (rc < 0) {
6028 LOGE("Support channel start failed");
6029 mMetadataChannel->stop();
6030 /* Although support and analysis are mutually exclusive today
6031                        adding it in any case for future-proofing */
6032 if (mAnalysisChannel) {
6033 mAnalysisChannel->stop();
6034 }
6035 pthread_mutex_unlock(&mMutex);
6036 return rc;
6037 }
6038 }
6039 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6040 it != mStreamInfo.end(); it++) {
6041 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6042 LOGH("Start Processing Channel mask=%d",
6043 channel->getStreamTypeMask());
6044 rc = channel->start();
6045 if (rc < 0) {
6046 LOGE("channel start failed");
6047 pthread_mutex_unlock(&mMutex);
6048 return rc;
6049 }
6050 }
6051
6052 if (mRawDumpChannel) {
6053 LOGD("Starting raw dump stream");
6054 rc = mRawDumpChannel->start();
6055 if (rc != NO_ERROR) {
6056 LOGE("Error Starting Raw Dump Channel");
6057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6058 it != mStreamInfo.end(); it++) {
6059 QCamera3Channel *channel =
6060 (QCamera3Channel *)(*it)->stream->priv;
6061 LOGH("Stopping Processing Channel mask=%d",
6062 channel->getStreamTypeMask());
6063 channel->stop();
6064 }
6065 if (mSupportChannel)
6066 mSupportChannel->stop();
6067 if (mAnalysisChannel) {
6068 mAnalysisChannel->stop();
6069 }
6070 mMetadataChannel->stop();
6071 pthread_mutex_unlock(&mMutex);
6072 return rc;
6073 }
6074 }
6075
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006076 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006077 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006078 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006079 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006083 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 }
6086
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006087 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006088 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006089 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006090 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006091 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6092 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6093 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006094 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6095 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6096 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006097
6098 if (isSessionHdrPlusModeCompatible()) {
6099 rc = enableHdrPlusModeLocked();
6100 if (rc != OK) {
6101 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6102 pthread_mutex_unlock(&mMutex);
6103 return rc;
6104 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006105 }
6106
6107 mFirstPreviewIntentSeen = true;
6108 }
6109 }
6110
Thierry Strudel3d639192016-09-09 11:52:26 -07006111 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6112
6113 mState = STARTED;
6114    // Use a timed condition wait below so the request is not blocked indefinitely
6115 struct timespec ts;
6116 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006117 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 if (rc < 0) {
6119 isValidTimeout = 0;
6120        LOGE("Error reading the monotonic clock!!");
6121 }
6122 else {
6123        // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006124 int64_t timeout = 5;
6125 {
6126 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6127 // If there is a pending HDR+ request, the following requests may be blocked until the
6128 // HDR+ request is done. So allow a longer timeout.
6129 if (mHdrPlusPendingRequests.size() > 0) {
6130 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6131 }
6132 }
6133 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 }
6135    //Block on the conditional variable until the in-flight request count drops below the minimum window
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006136 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006137 (mState != ERROR) && (mState != DEINIT)) {
6138 if (!isValidTimeout) {
6139 LOGD("Blocking on conditional wait");
6140 pthread_cond_wait(&mRequestCond, &mMutex);
6141 }
6142 else {
6143 LOGD("Blocking on timed conditional wait");
6144 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6145 if (rc == ETIMEDOUT) {
6146 rc = -ENODEV;
6147 LOGE("Unblocked on timeout!!!!");
6148 break;
6149 }
6150 }
6151 LOGD("Unblocked");
6152 if (mWokenUpByDaemon) {
6153 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006154 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 break;
6156 }
6157 }
6158 pthread_mutex_unlock(&mMutex);
6159
6160 return rc;
6161}
6162
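/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Starts the backend channel without sensor streaming, queries
 *              the sensor mode selected by the backend, starts Easel MIPI for
 *              that mode when the Easel manager client is open, and finally
 *              enables sensor streaming. Expected to be called with mMutex
 *              held, as the Locked suffix implies.
 *
 * RETURN     : 0 on success
 *              Error code on failure
 *==========================================================================*/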
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006163int32_t QCamera3HardwareInterface::startChannelLocked()
6164{
6165 // Configure modules for stream on.
6166 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6167 mChannelHandle, /*start_sensor_streaming*/false);
6168 if (rc != NO_ERROR) {
6169 LOGE("start_channel failed %d", rc);
6170 return rc;
6171 }
6172
6173 {
6174 // Configure Easel for stream on.
6175 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6176
6177 // Now that sensor mode should have been selected, get the selected sensor mode
6178 // info.
6179 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6180 getCurrentSensorModeInfo(mSensorModeInfo);
6181
6182 if (EaselManagerClientOpened) {
6183 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6184 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6185 /*enableCapture*/true);
6186 if (rc != OK) {
6187                ALOGE("%s: Failed to start MIPI for camera %u at rate %u", __FUNCTION__,
6188 mCameraId, mSensorModeInfo.op_pixel_clk);
6189 return rc;
6190 }
6191 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6192 mEaselMipiStarted = true;
6193 }
6194 }
6195
6196 // Start sensor streaming.
6197 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6198 mChannelHandle);
6199 if (rc != NO_ERROR) {
6200 LOGE("start_sensor_stream_on failed %d", rc);
6201 return rc;
6202 }
6203
6204 return 0;
6205}
6206
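/*===========================================================================
 * FUNCTION   : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel and, if Easel MIPI was started for
 *              this camera, stops it as well.
 *
 * PARAMETERS :
 *   @stopChannelImmediately : stop the channel without waiting for a frame
 *                             boundary
 *
 * RETURN     : None
 *==========================================================================*/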
6207void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6208{
6209 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6210 mChannelHandle, stopChannelImmediately);
6211
6212 {
6213 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6214 if (EaselManagerClientOpened && mEaselMipiStarted) {
6215 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6216 if (rc != 0) {
6217 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6218 }
6219 mEaselMipiStarted = false;
6220 }
6221 }
6222}
6223
Thierry Strudel3d639192016-09-09 11:52:26 -07006224/*===========================================================================
6225 * FUNCTION : dump
6226 *
6227 * DESCRIPTION: Dumps HAL3 state (pending requests, buffers, frame drops)
6228 *              to the given file descriptor
6229 * PARAMETERS :
6230 *   @fd : file descriptor to write the dump output to
6231 *
6232 * RETURN     : None
6233 *==========================================================================*/
6234void QCamera3HardwareInterface::dump(int fd)
6235{
6236 pthread_mutex_lock(&mMutex);
6237 dprintf(fd, "\n Camera HAL3 information Begin \n");
6238
6239 dprintf(fd, "\nNumber of pending requests: %zu \n",
6240 mPendingRequestsList.size());
6241 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6242 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6243 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6244 for(pendingRequestIterator i = mPendingRequestsList.begin();
6245 i != mPendingRequestsList.end(); i++) {
6246 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6247 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6248 i->input_buffer);
6249 }
6250 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6251 mPendingBuffersMap.get_num_overall_buffers());
6252 dprintf(fd, "-------+------------------\n");
6253 dprintf(fd, " Frame | Stream type mask \n");
6254 dprintf(fd, "-------+------------------\n");
6255 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6256 for(auto &j : req.mPendingBufferList) {
6257 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6258 dprintf(fd, " %5d | %11d \n",
6259 req.frame_number, channel->getStreamTypeMask());
6260 }
6261 }
6262 dprintf(fd, "-------+------------------\n");
6263
6264 dprintf(fd, "\nPending frame drop list: %zu\n",
6265 mPendingFrameDropList.size());
6266 dprintf(fd, "-------+-----------\n");
6267 dprintf(fd, " Frame | Stream ID \n");
6268 dprintf(fd, "-------+-----------\n");
6269 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6270 i != mPendingFrameDropList.end(); i++) {
6271 dprintf(fd, " %5d | %9d \n",
6272 i->frame_number, i->stream_ID);
6273 }
6274 dprintf(fd, "-------+-----------\n");
6275
6276 dprintf(fd, "\n Camera HAL3 information End \n");
6277
6278 /* use dumpsys media.camera as trigger to send update debug level event */
6279 mUpdateDebugLevel = true;
6280 pthread_mutex_unlock(&mMutex);
6281 return;
6282}
6283
6284/*===========================================================================
6285 * FUNCTION : flush
6286 *
6287 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6288 * conditionally restarts channels
6289 *
6290 * PARAMETERS :
6291 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006292 * @ stopChannelImmediately: stop the channel immediately. This should be used
6293 *                             when the device has encountered an error and
6294 *                             MIPI may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006295 *
6296 * RETURN :
6297 * 0 on success
6298 * Error code on failure
6299 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006300int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006301{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006302 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006303 int32_t rc = NO_ERROR;
6304
6305 LOGD("Unblocking Process Capture Request");
6306 pthread_mutex_lock(&mMutex);
6307 mFlush = true;
6308 pthread_mutex_unlock(&mMutex);
6309
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006310    // Disable HDR+ mode if it is enabled.
6311 {
6312 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6313 finishHdrPlusClientOpeningLocked(l);
6314 disableHdrPlusModeLocked();
6315 }
6316
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 rc = stopAllChannels();
6318 // unlink of dualcam
6319 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006320 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6321 &m_pDualCamCmdPtr->bundle_info;
6322 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006323 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6324 pthread_mutex_lock(&gCamLock);
6325
6326 if (mIsMainCamera == 1) {
6327 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6328 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006329 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 // related session id should be session id of linked session
6331 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6332 } else {
6333 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6334 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006335 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006336 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6337 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006338 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006339 pthread_mutex_unlock(&gCamLock);
6340
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006341 rc = mCameraHandle->ops->set_dual_cam_cmd(
6342 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006343 if (rc < 0) {
6344 LOGE("Dualcam: Unlink failed, but still proceed to close");
6345 }
6346 }
6347
6348 if (rc < 0) {
6349 LOGE("stopAllChannels failed");
6350 return rc;
6351 }
6352 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006353 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006354 }
6355
6356 // Reset bundle info
6357 rc = setBundleInfo();
6358 if (rc < 0) {
6359 LOGE("setBundleInfo failed %d", rc);
6360 return rc;
6361 }
6362
6363 // Mutex Lock
6364 pthread_mutex_lock(&mMutex);
6365
6366 // Unblock process_capture_request
6367 mPendingLiveRequest = 0;
6368 pthread_cond_signal(&mRequestCond);
6369
6370 rc = notifyErrorForPendingRequests();
6371 if (rc < 0) {
6372 LOGE("notifyErrorForPendingRequests failed");
6373 pthread_mutex_unlock(&mMutex);
6374 return rc;
6375 }
6376
6377 mFlush = false;
6378
6379 // Start the Streams/Channels
6380 if (restartChannels) {
6381 rc = startAllChannels();
6382 if (rc < 0) {
6383 LOGE("startAllChannels failed");
6384 pthread_mutex_unlock(&mMutex);
6385 return rc;
6386 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006387 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006388 // Configure modules for stream on.
6389 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006390 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006391 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006392 pthread_mutex_unlock(&mMutex);
6393 return rc;
6394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 }
6396 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006397 pthread_mutex_unlock(&mMutex);
6398
6399 return 0;
6400}
6401
6402/*===========================================================================
6403 * FUNCTION : flushPerf
6404 * DESCRIPTION: This is the performance-optimized version of flush that does
6405 *              not use stream off; it flushes the backend and waits for pending buffers to return
6406 * not use stream off, rather flushes the system
6407 *
6408 * PARAMETERS :
6409 *
6410 *
6411 * RETURN : 0 : success
6412 * -EINVAL: input is malformed (device is not valid)
6413 * -ENODEV: if the device has encountered a serious error
6414 *==========================================================================*/
6415int QCamera3HardwareInterface::flushPerf()
6416{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006417 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006418 int32_t rc = 0;
6419 struct timespec timeout;
6420 bool timed_wait = false;
6421
6422 pthread_mutex_lock(&mMutex);
6423 mFlushPerf = true;
6424 mPendingBuffersMap.numPendingBufsAtFlush =
6425 mPendingBuffersMap.get_num_overall_buffers();
6426 LOGD("Calling flush. Wait for %d buffers to return",
6427 mPendingBuffersMap.numPendingBufsAtFlush);
6428
6429 /* send the flush event to the backend */
6430 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6431 if (rc < 0) {
6432 LOGE("Error in flush: IOCTL failure");
6433 mFlushPerf = false;
6434 pthread_mutex_unlock(&mMutex);
6435 return -ENODEV;
6436 }
6437
6438 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6439 LOGD("No pending buffers in HAL, return flush");
6440 mFlushPerf = false;
6441 pthread_mutex_unlock(&mMutex);
6442 return rc;
6443 }
6444
6445 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006446 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006447 if (rc < 0) {
6448 LOGE("Error reading the real time clock, cannot use timed wait");
6449 } else {
6450 timeout.tv_sec += FLUSH_TIMEOUT;
6451 timed_wait = true;
6452 }
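    // Note: pthread_cond_timedwait() takes an absolute deadline, which is why
    // FLUSH_TIMEOUT seconds are added to the current time above. The wait loop
    // below re-checks the pending-buffer count, so spurious wakeups are benign.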
6453
6454    //Block on the condition variable
6455 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6456 LOGD("Waiting on mBuffersCond");
6457 if (!timed_wait) {
6458 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6459 if (rc != 0) {
6460 LOGE("pthread_cond_wait failed due to rc = %s",
6461 strerror(rc));
6462 break;
6463 }
6464 } else {
6465 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6466 if (rc != 0) {
6467 LOGE("pthread_cond_timedwait failed due to rc = %s",
6468 strerror(rc));
6469 break;
6470 }
6471 }
6472 }
6473 if (rc != 0) {
6474 mFlushPerf = false;
6475 pthread_mutex_unlock(&mMutex);
6476 return -ENODEV;
6477 }
6478
6479 LOGD("Received buffers, now safe to return them");
6480
6481 //make sure the channels handle flush
6482 //currently only required for the picture channel to release snapshot resources
6483 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6484 it != mStreamInfo.end(); it++) {
6485 QCamera3Channel *channel = (*it)->channel;
6486 if (channel) {
6487 rc = channel->flush();
6488 if (rc) {
6489 LOGE("Flushing the channels failed with error %d", rc);
6490 // even though the channel flush failed we need to continue and
6491 // return the buffers we have to the framework, however the return
6492 // value will be an error
6493 rc = -ENODEV;
6494 }
6495 }
6496 }
6497
6498 /* notify the frameworks and send errored results */
6499 rc = notifyErrorForPendingRequests();
6500 if (rc < 0) {
6501 LOGE("notifyErrorForPendingRequests failed");
6502 pthread_mutex_unlock(&mMutex);
6503 return rc;
6504 }
6505
6506 //unblock process_capture_request
6507 mPendingLiveRequest = 0;
6508 unblockRequestIfNecessary();
6509
6510 mFlushPerf = false;
6511 pthread_mutex_unlock(&mMutex);
6512 LOGD ("Flush Operation complete. rc = %d", rc);
6513 return rc;
6514}
6515
6516/*===========================================================================
6517 * FUNCTION : handleCameraDeviceError
6518 *
6519 * DESCRIPTION: This function performs an internal flush, notifies the framework
6520 *              of the error, and updates the state variable.
6521 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006522 * PARAMETERS :
6523 * @stopChannelImmediately : stop channels immediately without waiting for
6524 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006525 *
6526 * RETURN : NO_ERROR on Success
6527 * Error code on failure
6528 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006529int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006530{
6531 int32_t rc = NO_ERROR;
6532
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006533 {
6534 Mutex::Autolock lock(mFlushLock);
6535 pthread_mutex_lock(&mMutex);
6536 if (mState != ERROR) {
6537 //if mState != ERROR, nothing to be done
6538 pthread_mutex_unlock(&mMutex);
6539 return NO_ERROR;
6540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006541 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006542
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006543 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006544 if (NO_ERROR != rc) {
6545 LOGE("internal flush to handle mState = ERROR failed");
6546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006547
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006548 pthread_mutex_lock(&mMutex);
6549 mState = DEINIT;
6550 pthread_mutex_unlock(&mMutex);
6551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006552
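    // Notify the framework of a fatal device error. For CAMERA3_MSG_ERROR_DEVICE
    // the per-stream and per-frame fields do not apply, so error_stream is left
    // NULL and frame_number is left 0 below.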
6553 camera3_notify_msg_t notify_msg;
6554 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6555 notify_msg.type = CAMERA3_MSG_ERROR;
6556 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6557 notify_msg.message.error.error_stream = NULL;
6558 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006559 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006560
6561 return rc;
6562}
6563
6564/*===========================================================================
6565 * FUNCTION : captureResultCb
6566 *
6567 * DESCRIPTION: Callback handler for all capture results
6568 * (streams, as well as metadata)
6569 *
6570 * PARAMETERS :
6571 * @metadata : metadata information
6572 * @buffer : actual gralloc buffer to be returned to frameworks.
6573 *              NULL if metadata.
 * @frame_number : frame number of the capture request this callback is for
 * @isInputBuffer : true if the callback is for the request's input buffer
6574 *
6575 * RETURN : NONE
6576 *==========================================================================*/
6577void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6578 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6579{
6580 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006581 pthread_mutex_lock(&mMutex);
6582 uint8_t batchSize = mBatchSize;
6583 pthread_mutex_unlock(&mMutex);
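        // batchSize > 0 means HFR batch mode: one metadata callback carries the
        // results for several frames, so handleBatchMetadata() splits them up
        // before the per-frame handling below.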
6584 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006585 handleBatchMetadata(metadata_buf,
6586 true /* free_and_bufdone_meta_buf */);
6587 } else { /* mBatchSize = 0 */
6588 hdrPlusPerfLock(metadata_buf);
6589 pthread_mutex_lock(&mMutex);
6590 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006591 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006592 true /* last urgent frame of batch metadata */,
6593 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006594 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 pthread_mutex_unlock(&mMutex);
6596 }
6597 } else if (isInputBuffer) {
6598 pthread_mutex_lock(&mMutex);
6599 handleInputBufferWithLock(frame_number);
6600 pthread_mutex_unlock(&mMutex);
6601 } else {
6602 pthread_mutex_lock(&mMutex);
6603 handleBufferWithLock(buffer, frame_number);
6604 pthread_mutex_unlock(&mMutex);
6605 }
6606 return;
6607}
6608
6609/*===========================================================================
6610 * FUNCTION : getReprocessibleOutputStreamId
6611 *
6612 * DESCRIPTION: Get source output stream id for the input reprocess stream
6613 * based on size and format, which would be the largest
6614 * output stream if an input stream exists.
6615 *
6616 * PARAMETERS :
6617 * @id : return the stream id if found
6618 *
6619 * RETURN : int32_t type of status
6620 * NO_ERROR -- success
6621 *              non-zero failure code
6622 *==========================================================================*/
6623int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6624{
6625    /* check if there is any output or bidirectional stream with the same size
6626       and format, and return that stream */
6627 if ((mInputStreamInfo.dim.width > 0) &&
6628 (mInputStreamInfo.dim.height > 0)) {
6629 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6630 it != mStreamInfo.end(); it++) {
6631
6632 camera3_stream_t *stream = (*it)->stream;
6633 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6634 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6635 (stream->format == mInputStreamInfo.format)) {
6636 // Usage flag for an input stream and the source output stream
6637 // may be different.
6638 LOGD("Found reprocessible output stream! %p", *it);
6639 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6640 stream->usage, mInputStreamInfo.usage);
6641
6642 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6643 if (channel != NULL && channel->mStreams[0]) {
6644 id = channel->mStreams[0]->getMyServerID();
6645 return NO_ERROR;
6646 }
6647 }
6648 }
6649 } else {
6650 LOGD("No input stream, so no reprocessible output stream");
6651 }
6652 return NAME_NOT_FOUND;
6653}
6654
6655/*===========================================================================
6656 * FUNCTION : lookupFwkName
6657 *
6658 * DESCRIPTION: In case the enum is not same in fwk and backend
6659 * make sure the parameter is correctly propogated
6660 *
6661 * PARAMETERS :
6662 * @arr : map between the two enums
6663 * @len : len of the map
6664 * @hal_name : name of the hal_parm to map
6665 *
6666 * RETURN : int type of status
6667 * fwk_name -- success
6668 *              NAME_NOT_FOUND -- failure (no matching framework value)
6669 *==========================================================================*/
6670template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6671 size_t len, halType hal_name)
6672{
6673
6674 for (size_t i = 0; i < len; i++) {
6675 if (arr[i].hal_name == hal_name) {
6676 return arr[i].fwk_name;
6677 }
6678 }
6679
6680    /* Failing to find a matching framework type is not necessarily
6681 * an error case. This happens when mm-camera supports more attributes
6682 * than the frameworks do */
6683 LOGH("Cannot find matching framework type");
6684 return NAME_NOT_FOUND;
6685}
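// Illustrative use of lookupFwkName(), mirroring the calls made later in this
// file (SCENE_MODES_MAP and METADATA_MAP_SIZE are defined elsewhere in the HAL):
//
//     int val = lookupFwkName(SCENE_MODES_MAP,
//             METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
//     if (NAME_NOT_FOUND != val) {
//         uint8_t fwkSceneMode = (uint8_t)val;
//         camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
//     }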
6686
6687/*===========================================================================
6688 * FUNCTION : lookupHalName
6689 *
6690 * DESCRIPTION: In case the enum is not same in fwk and backend
6691 * make sure the parameter is correctly propogated
6692 *
6693 * PARAMETERS :
6694 * @arr : map between the two enums
6695 * @len : len of the map
6696 *   @fwk_name : name of the fwk_parm to map
6697 *
6698 * RETURN : int32_t type of status
6699 * hal_name -- success
6700 *              NAME_NOT_FOUND -- failure (no matching HAL value)
6701 *==========================================================================*/
6702template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6703 size_t len, fwkType fwk_name)
6704{
6705 for (size_t i = 0; i < len; i++) {
6706 if (arr[i].fwk_name == fwk_name) {
6707 return arr[i].hal_name;
6708 }
6709 }
6710
6711 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6712 return NAME_NOT_FOUND;
6713}
6714
6715/*===========================================================================
6716 * FUNCTION : lookupProp
6717 *
6718 * DESCRIPTION: lookup a value by its name
6719 *
6720 * PARAMETERS :
6721 * @arr : map between the two enums
6722 * @len : size of the map
6723 * @name : name to be looked up
6724 *
6725 * RETURN : Value if found
6726 * CAM_CDS_MODE_MAX if not found
6727 *==========================================================================*/
6728template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6729 size_t len, const char *name)
6730{
6731 if (name) {
6732 for (size_t i = 0; i < len; i++) {
6733 if (!strcmp(arr[i].desc, name)) {
6734 return arr[i].val;
6735 }
6736 }
6737 }
6738 return CAM_CDS_MODE_MAX;
6739}
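// Illustrative use of lookupProp(), assuming the CDS_MAP table and the
// "persist.camera.CDS" property used elsewhere in this HAL (names shown here
// for illustration only):
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds_mode =
//             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);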
6740
6741/*===========================================================================
6742 * FUNCTION   : translateFromHalMetadata
 *
6743 * DESCRIPTION: Translate the metadata received from the HAL backend into the
 *              camera_metadata_t result format expected by the framework
6744 *
6745 * PARAMETERS :
6746 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006747 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006748 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006749 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6750 *              in a batch. Always true for non-batch mode.
 * @enableZsl : if non-NULL, the ZSL enable flag to report in the result
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 *
6752 * RETURN : camera_metadata_t*
6753 * metadata in a format specified by fwk
6754 *==========================================================================*/
6755camera_metadata_t*
6756QCamera3HardwareInterface::translateFromHalMetadata(
6757 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006758 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006759 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006760 bool lastMetadataInBatch,
6761 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006762{
6763 CameraMetadata camMetadata;
6764 camera_metadata_t *resultMetadata;
6765
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006766 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006767 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6768 * Timestamp is needed because it's used for shutter notify calculation.
6769         */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006770 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006771 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006772 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006773 }
6774
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006775 if (pendingRequest.jpegMetadata.entryCount())
6776 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006777
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006778 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6779 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6780 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6781 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6782 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006783 if (mBatchSize == 0) {
6784 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006785 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006787
Samuel Ha68ba5172016-12-15 18:41:12 -08006788 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6789    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006790 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006791 // DevCamDebug metadata translateFromHalMetadata AF
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6793 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6794 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6795 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006798 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006799 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6800 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006803 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006804 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6805 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6808 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6809 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6810 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6813 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6814 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6815 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6818 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6820 *DevCamDebug_af_monitor_pdaf_target_pos;
6821 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6822 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6825 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6826 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6827 *DevCamDebug_af_monitor_pdaf_confidence;
6828 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6829 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6832 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6833 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6834 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6835 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6838 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6840 *DevCamDebug_af_monitor_tof_target_pos;
6841 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6842 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6845 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6846 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6847 *DevCamDebug_af_monitor_tof_confidence;
6848 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6849 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6852 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6853 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6854 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6855 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6856 }
6857 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6858 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6859 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6860 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6861 &fwk_DevCamDebug_af_monitor_type_select, 1);
6862 }
6863 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6864 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6865 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6866 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6867 &fwk_DevCamDebug_af_monitor_refocus, 1);
6868 }
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6870 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6871 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6872 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6873 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6874 }
6875 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6876 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6877 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6878 *DevCamDebug_af_search_pdaf_target_pos;
6879 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6880 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6883 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6884 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6885 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6886 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6889 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6890 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6891 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6892 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6893 }
6894 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6895 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6896 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6897 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6898 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6899 }
6900 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6901 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6902 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6903 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6904 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6905 }
6906 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6907 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6908 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6909 *DevCamDebug_af_search_tof_target_pos;
6910 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6911 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6914 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6915 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6916 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6917 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6918 }
6919 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6920 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6921 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6922 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6923 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6924 }
6925 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6926 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6927 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6928 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6929 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6930 }
6931 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6932 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6933 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6934 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6935 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6936 }
6937 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6938 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6939 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6940 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6941 &fwk_DevCamDebug_af_search_type_select, 1);
6942 }
6943 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6944 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6945 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6946 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6947 &fwk_DevCamDebug_af_search_next_pos, 1);
6948 }
6949 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6950 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6951 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6952 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6953 &fwk_DevCamDebug_af_search_target_pos, 1);
6954 }
6955 // DevCamDebug metadata translateFromHalMetadata AEC
6956 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6957 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6958 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6959 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6960 }
6961 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6962 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6963 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6964 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6965 }
6966 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6967 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6968 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6969 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6970 }
6971 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6972 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6973 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6974 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6975 }
6976 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6977 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6978 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6979 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6980 }
6981 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6982 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6983 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6984 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6985 }
6986 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6987 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6988 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6989 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6990 }
6991 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6992 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6993 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6994 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6995 }
Samuel Ha34229982017-02-17 13:51:11 -08006996 // DevCamDebug metadata translateFromHalMetadata zzHDR
6997 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6998 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6999 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7000 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7001 }
7002 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7003 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007004 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007005 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7006 }
7007 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7008 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7009 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7010 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7011 }
7012 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7013 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007014 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007015 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7016 }
7017 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7018 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7019 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7020 *DevCamDebug_aec_hdr_sensitivity_ratio;
7021 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7022 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7023 }
7024 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7025 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7026 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7027 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7028 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7029 }
7030 // DevCamDebug metadata translateFromHalMetadata ADRC
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7032 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7033 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7034 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7035 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7036 }
7037 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7038 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7039 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7040 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7041 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7042 }
7043 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7044 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7045 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7046 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7047 }
7048 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7049 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7050 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7051 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7052 }
7053 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7054 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7055 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7056 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7059 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7060 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7061 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7062 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007063 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7064 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7065 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7066 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7067 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7068 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7069 }
7070 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7071 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7072 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7073 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7074 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7075 }
7076 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7077 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7078 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7079 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7080 &fwk_DevCamDebug_aec_subject_motion, 1);
7081 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007082 // DevCamDebug metadata translateFromHalMetadata AWB
7083 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7084 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7085 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7086 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7087 }
7088 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7089 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7090 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7091 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7092 }
7093 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7094 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7095 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7096 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7097 }
7098 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7099 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7100 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7101 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7102 }
7103 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7104 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7105 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7106 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7107 }
7108 }
7109 // atrace_end(ATRACE_TAG_ALWAYS);
7110
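    // The IF_META_AVAILABLE(type, name, tag, table) blocks below come from the
    // mm-camera interface headers: each declares `name` as a pointer to the
    // metadata entry for `tag` and executes its body only when that tag is
    // marked valid in the HAL metadata buffer.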
Thierry Strudel3d639192016-09-09 11:52:26 -07007111 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7112 int64_t fwk_frame_number = *frame_number;
7113 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7114 }
7115
7116 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7117 int32_t fps_range[2];
7118 fps_range[0] = (int32_t)float_range->min_fps;
7119 fps_range[1] = (int32_t)float_range->max_fps;
7120 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7121 fps_range, 2);
7122 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7123 fps_range[0], fps_range[1]);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7127 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7128 }
7129
7130 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7131 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7132 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7133 *sceneMode);
7134 if (NAME_NOT_FOUND != val) {
7135 uint8_t fwkSceneMode = (uint8_t)val;
7136 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7137 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7138 fwkSceneMode);
7139 }
7140 }
7141
7142 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7143 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7144 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7145 }
7146
7147 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7148 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7149 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7150 }
7151
7152 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7153 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7154 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7155 }
7156
7157 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7158 CAM_INTF_META_EDGE_MODE, metadata) {
7159 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7160 }
7161
7162 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7163 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7164 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7165 }
7166
7167 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7168 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7169 }
7170
7171 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7172 if (0 <= *flashState) {
7173 uint8_t fwk_flashState = (uint8_t) *flashState;
7174 if (!gCamCapability[mCameraId]->flash_available) {
7175 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7176 }
7177 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7178 }
7179 }
7180
7181 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7182 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7183 if (NAME_NOT_FOUND != val) {
7184 uint8_t fwk_flashMode = (uint8_t)val;
7185 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7186 }
7187 }
7188
7189 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7190 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7191 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7192 }
7193
7194 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7195 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7196 }
7197
7198 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7199 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7200 }
7201
7202 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7203 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7204 }
7205
7206 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7207 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7208 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7209 }
7210
7211 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7212 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7213 LOGD("fwk_videoStab = %d", fwk_videoStab);
7214 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7215 } else {
7216        // Regardless of whether video stabilization is supported, CTS expects the EIS
7217        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7218 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7219 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007220 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007221 }
7222
7223 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7224 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7225 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7226 }
7227
7228 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7229 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7230 }
7231
Thierry Strudel3d639192016-09-09 11:52:26 -07007232 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7233 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007234 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007235
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007236 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7237 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007238
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007239 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007240 blackLevelAppliedPattern->cam_black_level[0],
7241 blackLevelAppliedPattern->cam_black_level[1],
7242 blackLevelAppliedPattern->cam_black_level[2],
7243 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007244 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7245 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007246
7247#ifndef USE_HAL_3_3
7248 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307249        // Need to convert the internal 14-bit black level to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007250        // depth space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307251 fwk_blackLevelInd[0] /= 16.0;
7252 fwk_blackLevelInd[1] /= 16.0;
7253 fwk_blackLevelInd[2] /= 16.0;
7254 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007255 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7256 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007257#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007258 }
7259
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260#ifndef USE_HAL_3_3
7261 // Fixed whitelevel is used by ISP/Sensor
7262 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7263 &gCamCapability[mCameraId]->white_level, 1);
7264#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007265
7266 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7267 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7268 int32_t scalerCropRegion[4];
7269 scalerCropRegion[0] = hScalerCropRegion->left;
7270 scalerCropRegion[1] = hScalerCropRegion->top;
7271 scalerCropRegion[2] = hScalerCropRegion->width;
7272 scalerCropRegion[3] = hScalerCropRegion->height;
7273
7274 // Adjust crop region from sensor output coordinate system to active
7275 // array coordinate system.
7276 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7277 scalerCropRegion[2], scalerCropRegion[3]);
7278
7279 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7280 }
7281
7282 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7283 LOGD("sensorExpTime = %lld", *sensorExpTime);
7284 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7285 }
7286
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007287 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7288 LOGD("expTimeBoost = %f", *expTimeBoost);
7289 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7290 }
7291
Thierry Strudel3d639192016-09-09 11:52:26 -07007292    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7293            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7294        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7295        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7296 }
7297
7298 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7299 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7300 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7301 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7302 sensorRollingShutterSkew, 1);
7303 }
7304
7305 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7306 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7307 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7308
7309 //calculate the noise profile based on sensitivity
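        // ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O) coefficient pair
        // per color channel, modeling the noise variance of a pixel value x as
        // approximately S * x + O; both coefficients depend on the analog
        // sensitivity, which is why computeNoiseModelEntryS/O take it as input.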
7310 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7311 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7312 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7313 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7314 noise_profile[i] = noise_profile_S;
7315 noise_profile[i+1] = noise_profile_O;
7316 }
7317 LOGD("noise model entry (S, O) is (%f, %f)",
7318 noise_profile_S, noise_profile_O);
7319 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7320 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7321 }
7322
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007324 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007326 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007327 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007328 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7329 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7330 }
7331 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332#endif
7333
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7335 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7336 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7340 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7341 *faceDetectMode);
7342 if (NAME_NOT_FOUND != val) {
7343 uint8_t fwk_faceDetectMode = (uint8_t)val;
7344 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7345
7346 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7347 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7348 CAM_INTF_META_FACE_DETECTION, metadata) {
7349 uint8_t numFaces = MIN(
7350 faceDetectionInfo->num_faces_detected, MAX_ROI);
7351 int32_t faceIds[MAX_ROI];
7352 uint8_t faceScores[MAX_ROI];
7353 int32_t faceRectangles[MAX_ROI * 4];
7354 int32_t faceLandmarks[MAX_ROI * 6];
7355 size_t j = 0, k = 0;
7356
7357 for (size_t i = 0; i < numFaces; i++) {
7358 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7359                    // Adjust the face rectangle from the sensor output coordinate
7360                    // system to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007361 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007362 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7363 rect.width, rect.height);
7364
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007365 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007366
Jason Lee8ce36fa2017-04-19 19:40:37 -07007367 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7368 "bottom-right (%d, %d)",
7369 faceDetectionInfo->frame_id, i,
7370 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7371 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7372
Thierry Strudel3d639192016-09-09 11:52:26 -07007373 j+= 4;
7374 }
7375 if (numFaces <= 0) {
7376 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7377 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7378 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7379 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7380 }
7381
7382 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7383 numFaces);
7384 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7385 faceRectangles, numFaces * 4U);
7386 if (fwk_faceDetectMode ==
7387 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7388 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7389 CAM_INTF_META_FACE_LANDMARK, metadata) {
7390
7391 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007392 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007393                            // Map the landmark coordinates from the sensor output
7394                            // coordinate system to the active array coordinate system.
7395 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007396 face_landmarks.left_eye_center.x,
7397 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007398 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 face_landmarks.right_eye_center.x,
7400 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007401 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007402 face_landmarks.mouth_center.x,
7403 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007404
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007405 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007406
7407 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7408 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7409 faceDetectionInfo->frame_id, i,
7410 faceLandmarks[k + LEFT_EYE_X],
7411 faceLandmarks[k + LEFT_EYE_Y],
7412 faceLandmarks[k + RIGHT_EYE_X],
7413 faceLandmarks[k + RIGHT_EYE_Y],
7414 faceLandmarks[k + MOUTH_X],
7415 faceLandmarks[k + MOUTH_Y]);
7416
Thierry Strudel04e026f2016-10-10 11:27:36 -07007417 k+= TOTAL_LANDMARK_INDICES;
7418 }
7419 } else {
7420 for (size_t i = 0; i < numFaces; i++) {
7421 setInvalidLandmarks(faceLandmarks+k);
7422 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007423 }
7424 }
7425
Jason Lee49619db2017-04-13 12:07:22 -07007426 for (size_t i = 0; i < numFaces; i++) {
7427 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7428
7429 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7430 faceDetectionInfo->frame_id, i, faceIds[i]);
7431 }
7432
Thierry Strudel3d639192016-09-09 11:52:26 -07007433 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7434 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7435 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007436 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007437 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7438 CAM_INTF_META_FACE_BLINK, metadata) {
7439 uint8_t detected[MAX_ROI];
7440 uint8_t degree[MAX_ROI * 2];
7441 for (size_t i = 0; i < numFaces; i++) {
7442 detected[i] = blinks->blink[i].blink_detected;
7443 degree[2 * i] = blinks->blink[i].left_blink;
7444 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007445
Jason Lee49619db2017-04-13 12:07:22 -07007446 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7447 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7448 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7449 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007450 }
7451 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7452 detected, numFaces);
7453 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7454 degree, numFaces * 2);
7455 }
7456 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7457 CAM_INTF_META_FACE_SMILE, metadata) {
7458 uint8_t degree[MAX_ROI];
7459 uint8_t confidence[MAX_ROI];
7460 for (size_t i = 0; i < numFaces; i++) {
7461 degree[i] = smiles->smile[i].smile_degree;
7462 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007463
Jason Lee49619db2017-04-13 12:07:22 -07007464 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7465 "smile_degree=%d, smile_score=%d",
7466 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007467 }
7468 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7469 degree, numFaces);
7470 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7471 confidence, numFaces);
7472 }
7473 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7474 CAM_INTF_META_FACE_GAZE, metadata) {
7475 int8_t angle[MAX_ROI];
7476 int32_t direction[MAX_ROI * 3];
7477 int8_t degree[MAX_ROI * 2];
7478 for (size_t i = 0; i < numFaces; i++) {
7479 angle[i] = gazes->gaze[i].gaze_angle;
7480 direction[3 * i] = gazes->gaze[i].updown_dir;
7481 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7482 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7483 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7484 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007485
7486 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7487 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7488 "left_right_gaze=%d, top_bottom_gaze=%d",
7489 faceDetectionInfo->frame_id, i, angle[i],
7490 direction[3 * i], direction[3 * i + 1],
7491 direction[3 * i + 2],
7492 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007493 }
7494 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7495 (uint8_t *)angle, numFaces);
7496 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7497 direction, numFaces * 3);
7498 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7499 (uint8_t *)degree, numFaces * 2);
7500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007501 }
7502 }
7503 }
7504 }
7505
7506 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7507 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007508 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007509 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007510 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007511
Shuzhen Wang14415f52016-11-16 18:26:18 -08007512 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7513 histogramBins = *histBins;
7514 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7515 }
7516
7517 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007518 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7519 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007520 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007521
7522 switch (stats_data->type) {
7523 case CAM_HISTOGRAM_TYPE_BAYER:
7524 switch (stats_data->bayer_stats.data_type) {
7525 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007526 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7527 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007528 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007529 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7530 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007531 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007532 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7533 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007534 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007535 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007536 case CAM_STATS_CHANNEL_R:
7537 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007538 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7539 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007540 }
7541 break;
7542 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007543 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007544 break;
7545 }
7546
Shuzhen Wang14415f52016-11-16 18:26:18 -08007547 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007548 }
7549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007550 }
7551
7552 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7553 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7554 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7555 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7556 }
7557
7558 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7559 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7560 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7561 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7562 }
7563
7564 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7565 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7566 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7567 CAM_MAX_SHADING_MAP_HEIGHT);
7568 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7569 CAM_MAX_SHADING_MAP_WIDTH);
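        // The lens shading map carries four gain values (one per Bayer channel)
        // for every grid cell, hence the 4U multiplier in the element count below.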
7570 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7571 lensShadingMap->lens_shading, 4U * map_width * map_height);
7572 }
7573
7574 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7575 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7576 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7577 }
7578
7579 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7580 //Populate CAM_INTF_META_TONEMAP_CURVES
7581 /* ch0 = G, ch 1 = B, ch 2 = R*/
7582 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7583 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7584 tonemap->tonemap_points_cnt,
7585 CAM_MAX_TONEMAP_CURVE_SIZE);
7586 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7587 }
7588
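        // Each tonemap curve is a list of (Pin, Pout) control points, hence the
        // element count of tonemap_points_cnt * 2 in the updates below.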
7589 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7590 &tonemap->curves[0].tonemap_points[0][0],
7591 tonemap->tonemap_points_cnt * 2);
7592
7593 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7594 &tonemap->curves[1].tonemap_points[0][0],
7595 tonemap->tonemap_points_cnt * 2);
7596
7597 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7598 &tonemap->curves[2].tonemap_points[0][0],
7599 tonemap->tonemap_points_cnt * 2);
7600 }
7601
7602 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7603 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7604 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7605 CC_GAIN_MAX);
7606 }
7607
7608 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7609 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7610 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7611 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7612 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7613 }
7614
7615 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7616 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7617 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7618 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7619 toneCurve->tonemap_points_cnt,
7620 CAM_MAX_TONEMAP_CURVE_SIZE);
7621 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7622 }
7623 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7624 (float*)toneCurve->curve.tonemap_points,
7625 toneCurve->tonemap_points_cnt * 2);
7626 }
7627
7628 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7629 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7630 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7631 predColorCorrectionGains->gains, 4);
7632 }
7633
7634 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7635 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7636 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7637 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7638 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7639 }
7640
7641 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7642 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7643 }
7644
7645 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7646 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7647 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7648 }
7649
7650 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7651 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7652 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7653 }
7654
7655 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7656 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7657 *effectMode);
7658 if (NAME_NOT_FOUND != val) {
7659 uint8_t fwk_effectMode = (uint8_t)val;
7660 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7661 }
7662 }
7663
7664 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7665 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7666 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7667 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7668 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7669 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7670 }
7671 int32_t fwk_testPatternData[4];
7672 fwk_testPatternData[0] = testPatternData->r;
7673 fwk_testPatternData[3] = testPatternData->b;
7674 switch (gCamCapability[mCameraId]->color_arrangement) {
7675 case CAM_FILTER_ARRANGEMENT_RGGB:
7676 case CAM_FILTER_ARRANGEMENT_GRBG:
7677 fwk_testPatternData[1] = testPatternData->gr;
7678 fwk_testPatternData[2] = testPatternData->gb;
7679 break;
7680 case CAM_FILTER_ARRANGEMENT_GBRG:
7681 case CAM_FILTER_ARRANGEMENT_BGGR:
7682 fwk_testPatternData[2] = testPatternData->gr;
7683 fwk_testPatternData[1] = testPatternData->gb;
7684 break;
7685 default:
7686 LOGE("color arrangement %d is not supported",
7687 gCamCapability[mCameraId]->color_arrangement);
7688 break;
7689 }
7690 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7691 }
7692
7693 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7694 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7695 }
7696
7697 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7698 String8 str((const char *)gps_methods);
7699 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7700 }
7701
7702 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7703 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7704 }
7705
7706 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7707 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7708 }
7709
7710 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7711 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7712 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7713 }
7714
7715 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7716 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7717 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7718 }
7719
7720 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7721 int32_t fwk_thumb_size[2];
7722 fwk_thumb_size[0] = thumb_size->width;
7723 fwk_thumb_size[1] = thumb_size->height;
7724 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7725 }
7726
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007727 // Skip reprocess metadata if there is no input stream.
7728 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7729 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7730 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7731 privateData,
7732 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7733 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007734 }
7735
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007736 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007737 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007738 meteringMode, 1);
7739 }
7740
Thierry Strudel54dc9782017-02-15 12:12:10 -08007741 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7742 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7743 LOGD("hdr_scene_data: %d %f\n",
7744 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7745 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7746 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7747 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7748 &isHdr, 1);
7749 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7750 &isHdrConfidence, 1);
7751 }
7752
7753
7754
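    // The tuning blob packed below has the layout: one uint32_t data version,
    // five uint32_t section sizes (sensor, VFE, CPP, CAC, mod3 -- mod3 is forced
    // to 0), then the sensor, VFE, CPP and CAC payloads back to back, each
    // truncated to its TUNING_*_DATA_MAX limit.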
Thierry Strudel3d639192016-09-09 11:52:26 -07007755 if (metadata->is_tuning_params_valid) {
7756 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7757 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7758 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7759
7760
7761 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7762 sizeof(uint32_t));
7763 data += sizeof(uint32_t);
7764
7765 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7766 sizeof(uint32_t));
7767 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7768 data += sizeof(uint32_t);
7769
7770 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7771 sizeof(uint32_t));
7772 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7773 data += sizeof(uint32_t);
7774
7775 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7776 sizeof(uint32_t));
7777 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7778 data += sizeof(uint32_t);
7779
7780 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7781 sizeof(uint32_t));
7782 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7783 data += sizeof(uint32_t);
7784
7785 metadata->tuning_params.tuning_mod3_data_size = 0;
7786 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7787 sizeof(uint32_t));
7788 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7789 data += sizeof(uint32_t);
7790
7791 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7792 TUNING_SENSOR_DATA_MAX);
7793 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7794 count);
7795 data += count;
7796
7797 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7798 TUNING_VFE_DATA_MAX);
7799 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7800 count);
7801 data += count;
7802
7803 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7804 TUNING_CPP_DATA_MAX);
7805 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7806 count);
7807 data += count;
7808
7809 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7810 TUNING_CAC_DATA_MAX);
7811 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7812 count);
7813 data += count;
7814
7815 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7816 (int32_t *)(void *)tuning_meta_data_blob,
7817 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7818 }
7819
7820 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7821 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7822 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7823 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7824 NEUTRAL_COL_POINTS);
7825 }
7826
7827 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7828 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7829 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7830 }
7831
7832 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7833 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7834 // Adjust crop region from sensor output coordinate system to active
7835 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007836 cam_rect_t hAeRect = hAeRegions->rect;
7837 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7838 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007839
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007840 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007841 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7842 REGIONS_TUPLE_COUNT);
7843 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7844 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007845 hAeRect.left, hAeRect.top, hAeRect.width,
7846 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007847 }
7848
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007849 if (!pendingRequest.focusStateSent) {
7850 if (pendingRequest.focusStateValid) {
7851 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7852 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007853 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007854 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7855 uint8_t fwk_afState = (uint8_t) *afState;
7856 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7857 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7858 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007859 }
7860 }
7861
Thierry Strudel3d639192016-09-09 11:52:26 -07007862 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7863 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7864 }
7865
7866 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7867 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7868 }
7869
7870 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7871 uint8_t fwk_lensState = *lensState;
7872 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7873 }
7874
Thierry Strudel3d639192016-09-09 11:52:26 -07007875 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007876 uint32_t ab_mode = *hal_ab_mode;
7877 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7878 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7879 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007882 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007883 if (NAME_NOT_FOUND != val) {
7884 uint8_t fwk_ab_mode = (uint8_t)val;
7885 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7886 }
7887 }
7888
7889 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7890 int val = lookupFwkName(SCENE_MODES_MAP,
7891 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7892 if (NAME_NOT_FOUND != val) {
7893 uint8_t fwkBestshotMode = (uint8_t)val;
7894 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7895 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7896 } else {
7897 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7898 }
7899 }
7900
7901 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7902 uint8_t fwk_mode = (uint8_t) *mode;
7903 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7904 }
7905
7906 /* Constant metadata values to be updated */
7907 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7908 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7909
7910 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7911 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7912
7913 int32_t hotPixelMap[2];
7914 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7915
7916 // CDS
7917 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7918 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7919 }
7920
Thierry Strudel04e026f2016-10-10 11:27:36 -07007921 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7922 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007923 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007924 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7925 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7926 } else {
7927 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7928 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007929
7930 if(fwk_hdr != curr_hdr_state) {
7931 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7932 if(fwk_hdr)
7933 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7934 else
7935 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7936 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007937 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7938 }
7939
Thierry Strudel54dc9782017-02-15 12:12:10 -08007940 //binning correction
7941 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7942 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7943 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7944 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7945 }
7946
Thierry Strudel04e026f2016-10-10 11:27:36 -07007947 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007948 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007949 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7950 int8_t is_ir_on = 0;
7951
7952 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7953 if(is_ir_on != curr_ir_state) {
7954 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7955 if(is_ir_on)
7956 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7957 else
7958 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7959 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007960 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007961 }
7962
Thierry Strudel269c81a2016-10-12 12:13:59 -07007963 // AEC SPEED
7964 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7965 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7966 }
7967
7968 // AWB SPEED
7969 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7970 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7971 }
7972
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 // TNR
7974 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7975 uint8_t tnr_enable = tnr->denoise_enable;
7976 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007977 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7978 int8_t is_tnr_on = 0;
7979
7980 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7981 if(is_tnr_on != curr_tnr_state) {
7982 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7983 if(is_tnr_on)
7984 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7985 else
7986 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7987 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007988
7989 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7990 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7991 }
7992
7993 // Reprocess crop data
7994 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7995 uint8_t cnt = crop_data->num_of_streams;
7996 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7997 // mm-qcamera-daemon only posts crop_data for streams
7998 // not linked to pproc, so the absence of valid crop metadata
7999 // is not necessarily an error case.
8000 LOGD("No valid crop metadata entries");
8001 } else {
8002 uint32_t reproc_stream_id;
8003 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8004 LOGD("No reprocessible stream found, ignore crop data");
8005 } else {
8006 int rc = NO_ERROR;
8007 Vector<int32_t> roi_map;
8008 int32_t *crop = new int32_t[cnt*4];
8009 if (NULL == crop) {
8010 rc = NO_MEMORY;
8011 }
8012 if (NO_ERROR == rc) {
8013 int32_t streams_found = 0;
8014 for (size_t i = 0; i < cnt; i++) {
8015 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8016 if (pprocDone) {
8017 // HAL already does internal reprocessing,
8018 // either via reprocessing before JPEG encoding,
8019 // or offline postprocessing for pproc bypass case.
8020 crop[0] = 0;
8021 crop[1] = 0;
8022 crop[2] = mInputStreamInfo.dim.width;
8023 crop[3] = mInputStreamInfo.dim.height;
8024 } else {
8025 crop[0] = crop_data->crop_info[i].crop.left;
8026 crop[1] = crop_data->crop_info[i].crop.top;
8027 crop[2] = crop_data->crop_info[i].crop.width;
8028 crop[3] = crop_data->crop_info[i].crop.height;
8029 }
8030 roi_map.add(crop_data->crop_info[i].roi_map.left);
8031 roi_map.add(crop_data->crop_info[i].roi_map.top);
8032 roi_map.add(crop_data->crop_info[i].roi_map.width);
8033 roi_map.add(crop_data->crop_info[i].roi_map.height);
8034 streams_found++;
8035 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8036 crop[0], crop[1], crop[2], crop[3]);
8037 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8038 crop_data->crop_info[i].roi_map.left,
8039 crop_data->crop_info[i].roi_map.top,
8040 crop_data->crop_info[i].roi_map.width,
8041 crop_data->crop_info[i].roi_map.height);
8042 break;
8043
8044 }
8045 }
8046 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8047 &streams_found, 1);
8048 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8049 crop, (size_t)(streams_found * 4));
8050 if (roi_map.array()) {
8051 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8052 roi_map.array(), roi_map.size());
8053 }
8054 }
8055 if (crop) {
8056 delete [] crop;
8057 }
8058 }
8059 }
8060 }
8061
8062 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8063 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
8064 // so hardcode the CAC result to OFF mode.
8065 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8066 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8067 } else {
8068 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8069 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8070 *cacMode);
8071 if (NAME_NOT_FOUND != val) {
8072 uint8_t resultCacMode = (uint8_t)val;
8073 // Check whether the CAC result from the callback matches the CAC mode set by the framework.
8074 // If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008075 if (pendingRequest.fwkCacMode != resultCacMode) {
8076 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008078 //Check if CAC is disabled by property
8079 if (m_cacModeDisabled) {
8080 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8081 }
8082
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008083 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8085 } else {
8086 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8087 }
8088 }
8089 }
8090
8091 // Post blob of cam_cds_data through vendor tag.
8092 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8093 uint8_t cnt = cdsInfo->num_of_streams;
8094 cam_cds_data_t cdsDataOverride;
8095 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8096 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8097 cdsDataOverride.num_of_streams = 1;
8098 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8099 uint32_t reproc_stream_id;
8100 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8101 LOGD("No reprocessible stream found, ignore cds data");
8102 } else {
8103 for (size_t i = 0; i < cnt; i++) {
8104 if (cdsInfo->cds_info[i].stream_id ==
8105 reproc_stream_id) {
8106 cdsDataOverride.cds_info[0].cds_enable =
8107 cdsInfo->cds_info[i].cds_enable;
8108 break;
8109 }
8110 }
8111 }
8112 } else {
8113 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8114 }
8115 camMetadata.update(QCAMERA3_CDS_INFO,
8116 (uint8_t *)&cdsDataOverride,
8117 sizeof(cam_cds_data_t));
8118 }
8119
8120 // Ldaf calibration data
8121 if (!mLdafCalibExist) {
8122 IF_META_AVAILABLE(uint32_t, ldafCalib,
8123 CAM_INTF_META_LDAF_EXIF, metadata) {
8124 mLdafCalibExist = true;
8125 mLdafCalib[0] = ldafCalib[0];
8126 mLdafCalib[1] = ldafCalib[1];
8127 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8128 ldafCalib[0], ldafCalib[1]);
8129 }
8130 }
8131
Thierry Strudel54dc9782017-02-15 12:12:10 -08008132 // EXIF debug data through vendor tag
8133 /*
8134 * Mobicat Mask can assume 3 values:
8135 * 1 refers to Mobicat data,
8136 * 2 refers to Stats Debug and Exif Debug Data,
8137 * 3 refers to Mobicat and Stats Debug Data.
8138 * We want to make sure that we are sending Exif debug data
8139 * only when Mobicat Mask is 2.
8140 */
8141 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8142 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8143 (uint8_t *)(void *)mExifParams.debug_params,
8144 sizeof(mm_jpeg_debug_exif_params_t));
8145 }
8146
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008147 // Reprocess and DDM debug data through vendor tag
8148 cam_reprocess_info_t repro_info;
8149 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008150 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8151 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008152 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 }
8154 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8155 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008156 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008157 }
8158 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8159 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008160 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008161 }
8162 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8163 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008164 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008165 }
8166 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8167 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008168 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008169 }
8170 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008171 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 }
8173 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8174 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008175 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008176 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008177 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8178 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8179 }
8180 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8181 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8182 }
8183 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8184 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008185
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008186 // INSTANT AEC MODE
8187 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8188 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8189 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8190 }
8191
Shuzhen Wange763e802016-03-31 10:24:29 -07008192 // AF scene change
8193 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8194 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8195 }
8196
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008197 // Enable ZSL
8198 if (enableZsl != nullptr) {
8199 uint8_t value = *enableZsl ?
8200 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8201 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8202 }
8203
Xu Han821ea9c2017-05-23 09:00:40 -07008204 // OIS Data
8205 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8206 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8207 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8208 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8209 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8210 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8211 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8212 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8213 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8214 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8215 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008216 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8217 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8218 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8219 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008220 }
8221
Thierry Strudel3d639192016-09-09 11:52:26 -07008222 resultMetadata = camMetadata.release();
8223 return resultMetadata;
8224}
8225
8226/*===========================================================================
8227 * FUNCTION : saveExifParams
8228 *
8229 * DESCRIPTION: Cache EXIF debug parameters from the metadata callback for use in JPEG EXIF debug data
8230 *
8231 * PARAMETERS :
8232 * @metadata : metadata information from callback
8233 *
8234 * RETURN : none
8235 *
8236 *==========================================================================*/
8237void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8238{
8239 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8240 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8241 if (mExifParams.debug_params) {
8242 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8243 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8244 }
8245 }
8246 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8247 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8248 if (mExifParams.debug_params) {
8249 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8250 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8251 }
8252 }
8253 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8254 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8255 if (mExifParams.debug_params) {
8256 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8257 mExifParams.debug_params->af_debug_params_valid = TRUE;
8258 }
8259 }
8260 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8261 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8262 if (mExifParams.debug_params) {
8263 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8264 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8265 }
8266 }
8267 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8268 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8269 if (mExifParams.debug_params) {
8270 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8271 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8272 }
8273 }
8274 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8275 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8276 if (mExifParams.debug_params) {
8277 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8278 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8279 }
8280 }
8281 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8282 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8283 if (mExifParams.debug_params) {
8284 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8285 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8286 }
8287 }
8288 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8289 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8290 if (mExifParams.debug_params) {
8291 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8292 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8293 }
8294 }
8295}
8296
8297/*===========================================================================
8298 * FUNCTION : get3AExifParams
8299 *
8300 * DESCRIPTION: Return the cached EXIF parameters saved from metadata callbacks
8301 *
8302 * PARAMETERS : none
8303 *
8304 *
8305 * RETURN : mm_jpeg_exif_params_t
8306 *
8307 *==========================================================================*/
8308mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8309{
8310 return mExifParams;
8311}
8312
8313/*===========================================================================
8314 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8315 *
8316 * DESCRIPTION: Translate urgent (partial result) metadata from the backend into framework result metadata
8317 *
8318 * PARAMETERS :
8319 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008320 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8321 * urgent metadata in a batch. Always true for
8322 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008323 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008324 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8325 * i.e. even though it doesn't map to a valid partial
8326 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008327 * RETURN : camera_metadata_t*
8328 * metadata in a format specified by fwk
8329 *==========================================================================*/
8330camera_metadata_t*
8331QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008332 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008333 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008334{
8335 CameraMetadata camMetadata;
8336 camera_metadata_t *resultMetadata;
8337
Shuzhen Wang485e2442017-08-02 12:21:08 -07008338 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008339 /* In batch mode, use empty metadata if this is not the last in batch
8340 */
8341 resultMetadata = allocate_camera_metadata(0, 0);
8342 return resultMetadata;
8343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008344
8345 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8346 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8347 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8348 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8349 }
8350
8351 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8352 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8353 &aecTrigger->trigger, 1);
8354 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8355 &aecTrigger->trigger_id, 1);
8356 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8357 aecTrigger->trigger);
8358 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8359 aecTrigger->trigger_id);
8360 }
8361
8362 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8363 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8364 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8365 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8366 }
8367
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008368 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8369 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8370 if (NAME_NOT_FOUND != val) {
8371 uint8_t fwkAfMode = (uint8_t)val;
8372 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8373 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8374 } else {
8375 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8376 val);
8377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008378 }
8379
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008380 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8381 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8382 af_trigger->trigger);
8383 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8384 af_trigger->trigger_id);
8385
8386 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8387 mAfTrigger = *af_trigger;
8388 uint32_t fwk_AfState = (uint32_t) *afState;
8389
8390 // For the frame carrying a new trigger, check whether an early AF state is
8391 // available; if so, send it as a partial result for that frame, and apply the
8392 // current AF state to all pending results that precede this frame number.
8393 for (auto & pendingRequest : mPendingRequestsList) {
8394 if (pendingRequest.frame_number < frame_number) {
8395 pendingRequest.focusStateValid = true;
8396 pendingRequest.focusState = fwk_AfState;
8397 } else if (pendingRequest.frame_number == frame_number) {
8398 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8399 // Check if early AF state for trigger exists. If yes, send AF state as
8400 // partial result for better latency.
8401 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8402 pendingRequest.focusStateSent = true;
8403 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8404 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8405 frame_number, fwkEarlyAfState);
8406 }
8407 }
8408 }
8409 }
8410 }
8411 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8412 &mAfTrigger.trigger, 1);
8413 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8414
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008415 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8416 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008417 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008418 int32_t afRegions[REGIONS_TUPLE_COUNT];
8419 // Adjust crop region from sensor output coordinate system to active
8420 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008421 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8422 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008423
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008424 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008425 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8426 REGIONS_TUPLE_COUNT);
8427 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8428 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008429 hAfRect.left, hAfRect.top, hAfRect.width,
8430 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008431 }
8432
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008433 // AF region confidence
8434 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8435 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8436 }
8437
Thierry Strudel3d639192016-09-09 11:52:26 -07008438 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8439 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8440 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8441 if (NAME_NOT_FOUND != val) {
8442 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8443 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8444 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8445 } else {
8446 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8447 }
8448 }
8449
8450 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8451 uint32_t aeMode = CAM_AE_MODE_MAX;
8452 int32_t flashMode = CAM_FLASH_MODE_MAX;
8453 int32_t redeye = -1;
8454 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8455 aeMode = *pAeMode;
8456 }
8457 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8458 flashMode = *pFlashMode;
8459 }
8460 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8461 redeye = *pRedeye;
8462 }
8463
8464 if (1 == redeye) {
8465 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8466 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8467 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8468 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8469 flashMode);
8470 if (NAME_NOT_FOUND != val) {
8471 fwk_aeMode = (uint8_t)val;
8472 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8473 } else {
8474 LOGE("Unsupported flash mode %d", flashMode);
8475 }
8476 } else if (aeMode == CAM_AE_MODE_ON) {
8477 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8478 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8479 } else if (aeMode == CAM_AE_MODE_OFF) {
8480 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8481 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008482 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8483 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8484 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008485 } else {
8486 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8487 "flashMode:%d, aeMode:%u!!!",
8488 redeye, flashMode, aeMode);
8489 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008490 if (mInstantAEC) {
8491 // Increment the frame index count until a bound is reached for instant AEC.
8492 mInstantAecFrameIdxCount++;
8493 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8494 CAM_INTF_META_AEC_INFO, metadata) {
8495 LOGH("ae_params->settled = %d",ae_params->settled);
8496 // If AEC settled, or if number of frames reached bound value,
8497 // should reset instant AEC.
8498 if (ae_params->settled ||
8499 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8500 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8501 mInstantAEC = false;
8502 mResetInstantAEC = true;
8503 mInstantAecFrameIdxCount = 0;
8504 }
8505 }
8506 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008507
8508 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8509 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8510 IF_META_AVAILABLE(int32_t, af_tof_distance,
8511 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8512 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8513 int32_t fwk_af_tof_distance = *af_tof_distance;
8514 if (fwk_af_tof_confidence == 1) {
8515 mSceneDistance = fwk_af_tof_distance;
8516 } else {
8517 mSceneDistance = -1;
8518 }
8519 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8520 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8521 }
8522 }
8523 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8524
Thierry Strudel3d639192016-09-09 11:52:26 -07008525 resultMetadata = camMetadata.release();
8526 return resultMetadata;
8527}
8528
8529/*===========================================================================
8530 * FUNCTION : dumpMetadataToFile
8531 *
8532 * DESCRIPTION: Dumps tuning metadata to file system
8533 *
8534 * PARAMETERS :
8535 * @meta : tuning metadata
8536 * @dumpFrameCount : current dump frame count
8537 * @enabled : Enable mask
8538 *
8539 *==========================================================================*/
8540void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8541 uint32_t &dumpFrameCount,
8542 bool enabled,
8543 const char *type,
8544 uint32_t frameNumber)
8545{
8546 //Some sanity checks
8547 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8548 LOGE("Tuning sensor data size bigger than expected %d: %d",
8549 meta.tuning_sensor_data_size,
8550 TUNING_SENSOR_DATA_MAX);
8551 return;
8552 }
8553
8554 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8555 LOGE("Tuning VFE data size bigger than expected %d: %d",
8556 meta.tuning_vfe_data_size,
8557 TUNING_VFE_DATA_MAX);
8558 return;
8559 }
8560
8561 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8562 LOGE("Tuning CPP data size bigger than expected %d: %d",
8563 meta.tuning_cpp_data_size,
8564 TUNING_CPP_DATA_MAX);
8565 return;
8566 }
8567
8568 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8569 LOGE("Tuning CAC data size bigger than expected %d: %d",
8570 meta.tuning_cac_data_size,
8571 TUNING_CAC_DATA_MAX);
8572 return;
8573 }
8574 //
8575
8576 if(enabled){
8577 char timeBuf[FILENAME_MAX];
8578 char buf[FILENAME_MAX];
8579 memset(buf, 0, sizeof(buf));
8580 memset(timeBuf, 0, sizeof(timeBuf));
8581 time_t current_time;
8582 struct tm * timeinfo;
8583 time (&current_time);
8584 timeinfo = localtime (&current_time);
8585 if (timeinfo != NULL) {
8586 strftime (timeBuf, sizeof(timeBuf),
8587 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8588 }
8589 String8 filePath(timeBuf);
8590 snprintf(buf,
8591 sizeof(buf),
8592 "%dm_%s_%d.bin",
8593 dumpFrameCount,
8594 type,
8595 frameNumber);
8596 filePath.append(buf);
8597 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8598 if (file_fd >= 0) {
8599 ssize_t written_len = 0;
8600 meta.tuning_data_version = TUNING_DATA_VERSION;
8601 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8602 written_len += write(file_fd, data, sizeof(uint32_t));
8603 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8604 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8605 written_len += write(file_fd, data, sizeof(uint32_t));
8606 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8607 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8608 written_len += write(file_fd, data, sizeof(uint32_t));
8609 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8610 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8611 written_len += write(file_fd, data, sizeof(uint32_t));
8612 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8613 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8614 written_len += write(file_fd, data, sizeof(uint32_t));
8615 meta.tuning_mod3_data_size = 0;
8616 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8617 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8618 written_len += write(file_fd, data, sizeof(uint32_t));
8619 size_t total_size = meta.tuning_sensor_data_size;
8620 data = (void *)((uint8_t *)&meta.data);
8621 written_len += write(file_fd, data, total_size);
8622 total_size = meta.tuning_vfe_data_size;
8623 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8624 written_len += write(file_fd, data, total_size);
8625 total_size = meta.tuning_cpp_data_size;
8626 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8627 written_len += write(file_fd, data, total_size);
8628 total_size = meta.tuning_cac_data_size;
8629 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8630 written_len += write(file_fd, data, total_size);
8631 close(file_fd);
8632 }else {
8633 LOGE("fail to open file for metadata dumping");
8634 }
8635 }
8636}
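/*
 * The .bin dump written above starts with six uint32_t header words -- data
 * version, sensor size, VFE size, CPP size, CAC size, mod3 size (always 0) --
 * followed by the sensor, VFE, CPP and CAC payloads in that order. A minimal
 * offline reader could look roughly like the hypothetical sketch below (not
 * part of the HAL build):
 *
 *   uint32_t hdr[6];
 *   read(fd, hdr, sizeof(hdr));                  // version + five section sizes
 *   std::vector<uint8_t> sensor(hdr[1]), vfe(hdr[2]), cpp(hdr[3]), cac(hdr[4]);
 *   read(fd, sensor.data(), sensor.size());
 *   read(fd, vfe.data(), vfe.size());
 *   read(fd, cpp.data(), cpp.size());
 *   read(fd, cac.data(), cac.size());
 */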
8637
8638/*===========================================================================
8639 * FUNCTION : cleanAndSortStreamInfo
8640 *
8641 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8642 * and sort them such that raw streams are at the end of the list.
8643 * This is a workaround for a camera daemon constraint.
8644 *
8645 * PARAMETERS : None
8646 *
8647 *==========================================================================*/
8648void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8649{
8650 List<stream_info_t *> newStreamInfo;
8651
8652 /*clean up invalid streams*/
8653 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8654 it != mStreamInfo.end();) {
8655 if(((*it)->status) == INVALID){
8656 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8657 delete channel;
8658 free(*it);
8659 it = mStreamInfo.erase(it);
8660 } else {
8661 it++;
8662 }
8663 }
8664
8665 // Move preview/video/callback/snapshot streams into newList
8666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8667 it != mStreamInfo.end();) {
8668 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8669 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8670 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8671 newStreamInfo.push_back(*it);
8672 it = mStreamInfo.erase(it);
8673 } else
8674 it++;
8675 }
8676 // Move raw streams into newList
8677 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8678 it != mStreamInfo.end();) {
8679 newStreamInfo.push_back(*it);
8680 it = mStreamInfo.erase(it);
8681 }
8682
8683 mStreamInfo = newStreamInfo;
Chien-Yu Chen3d836272017-09-20 11:10:21 -07008684
8685 // Make sure that stream IDs are unique.
8686 uint32_t id = 0;
8687 for (auto streamInfo : mStreamInfo) {
8688 streamInfo->id = id++;
8689 }
8690
Thierry Strudel3d639192016-09-09 11:52:26 -07008691}
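/*
 * Illustrative example (hypothetical stream set): if mStreamInfo initially
 * holds { RAW16, PREVIEW(YUV), BLOB } with the RAW16 entry first, the two
 * loops above reorder it to { PREVIEW(YUV), BLOB, RAW16 } and the final pass
 * assigns ids 0, 1, 2 in that order, so raw streams always end up last.
 */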
8692
8693/*===========================================================================
8694 * FUNCTION : extractJpegMetadata
8695 *
8696 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8697 * JPEG metadata is cached in HAL, and return as part of capture
8698 * result when metadata is returned from camera daemon.
8699 *
8700 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8701 * @request: capture request
8702 *
8703 *==========================================================================*/
8704void QCamera3HardwareInterface::extractJpegMetadata(
8705 CameraMetadata& jpegMetadata,
8706 const camera3_capture_request_t *request)
8707{
8708 CameraMetadata frame_settings;
8709 frame_settings = request->settings;
8710
8711 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8712 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8713 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8714 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8715
8716 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8717 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8718 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8719 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8720
8721 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8722 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8723 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8724 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8725
8726 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8727 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8728 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8729 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8730
8731 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8732 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8733 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8734 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8735
8736 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8737 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8738 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8739 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8740
8741 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8742 int32_t thumbnail_size[2];
8743 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8744 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8745 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8746 int32_t orientation =
8747 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008748 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008749 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8750 int32_t temp;
8751 temp = thumbnail_size[0];
8752 thumbnail_size[0] = thumbnail_size[1];
8753 thumbnail_size[1] = temp;
8754 }
8755 }
8756 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8757 thumbnail_size,
8758 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8759 }
8760
8761}
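/*
 * Illustrative example (hypothetical request values): with
 * ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240}, ANDROID_JPEG_ORIENTATION = 90 and
 * needJpegExifRotation() returning false, the cached jpegMetadata holds a
 * thumbnail size of {240, 320}, since the dimensions are swapped for 90/270
 * degree rotations that are not expressed through EXIF.
 */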
8762
8763/*===========================================================================
8764 * FUNCTION : convertToRegions
8765 *
8766 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8767 *
8768 * PARAMETERS :
8769 * @rect : cam_rect_t struct to convert
8770 * @region : int32_t destination array
8771 * @weight : if we are converting from cam_area_t, weight is valid
8772 * else weight = -1
8773 *
8774 *==========================================================================*/
8775void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8776 int32_t *region, int weight)
8777{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008778 region[FACE_LEFT] = rect.left;
8779 region[FACE_TOP] = rect.top;
8780 region[FACE_RIGHT] = rect.left + rect.width;
8781 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008782 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008783 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008784 }
8785}
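/*
 * Illustrative example (values are hypothetical; FACE_LEFT..FACE_WEIGHT are
 * assumed to index the tuple as {left, top, right, bottom, weight}):
 *
 *   cam_rect_t rect;
 *   rect.left = 10; rect.top = 20; rect.width = 100; rect.height = 50;
 *   int32_t region[REGIONS_TUPLE_COUNT];
 *   convertToRegions(rect, region, 1);
 *   // region == {10, 20, 110, 70, 1}   (right = left + width, bottom = top + height)
 */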
8786
8787/*===========================================================================
8788 * FUNCTION : convertFromRegions
8789 *
8790 * DESCRIPTION: helper method to convert from array to cam_rect_t
8791 *
8792 * PARAMETERS :
8793 * @rect : cam_rect_t struct to convert
8794 * @region : int32_t destination array
8795 * @weight : if we are converting from cam_area_t, weight is valid
8796 * else weight = -1
8797 *
8798 *==========================================================================*/
8799void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008800 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008801{
Thierry Strudel3d639192016-09-09 11:52:26 -07008802 int32_t x_min = frame_settings.find(tag).data.i32[0];
8803 int32_t y_min = frame_settings.find(tag).data.i32[1];
8804 int32_t x_max = frame_settings.find(tag).data.i32[2];
8805 int32_t y_max = frame_settings.find(tag).data.i32[3];
8806 roi.weight = frame_settings.find(tag).data.i32[4];
8807 roi.rect.left = x_min;
8808 roi.rect.top = y_min;
8809 roi.rect.width = x_max - x_min;
8810 roi.rect.height = y_max - y_min;
8811}
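/*
 * Illustrative example (hypothetical tag data): if the entry for the given tag
 * holds {x_min, y_min, x_max, y_max, weight} = {100, 200, 400, 500, 1}, the
 * resulting cam_area_t is rect = {left 100, top 200, width 300, height 300}
 * with weight 1; width and height are derived as x_max - x_min and y_max - y_min.
 */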
8812
8813/*===========================================================================
8814 * FUNCTION : resetIfNeededROI
8815 *
8816 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8817 * crop region
8818 *
8819 * PARAMETERS :
8820 * @roi : cam_area_t struct to resize
8821 * @scalerCropRegion : cam_crop_region_t region to compare against
8822 *
8823 *
8824 *==========================================================================*/
8825bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8826 const cam_crop_region_t* scalerCropRegion)
8827{
8828 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8829 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8830 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8831 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8832
8833 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8834 * Without this check, the validation below (whether the roi lies inside the
8835 * scaler crop region) would fail, leaving the roi un-reset and causing the
8836 * algorithm to keep using a stale roi window.
8837 */
8838 if (roi->weight == 0) {
8839 return true;
8840 }
8841
8842 if ((roi_x_max < scalerCropRegion->left) ||
8843 // right edge of roi window is left of scaler crop's left edge
8844 (roi_y_max < scalerCropRegion->top) ||
8845 // bottom edge of roi window is above scaler crop's top edge
8846 (roi->rect.left > crop_x_max) ||
8847 // left edge of roi window is to the right of scaler crop's right edge
8848 (roi->rect.top > crop_y_max)){
8849 // top edge of roi window is below scaler crop's bottom edge
8850 return false;
8851 }
8852 if (roi->rect.left < scalerCropRegion->left) {
8853 roi->rect.left = scalerCropRegion->left;
8854 }
8855 if (roi->rect.top < scalerCropRegion->top) {
8856 roi->rect.top = scalerCropRegion->top;
8857 }
8858 if (roi_x_max > crop_x_max) {
8859 roi_x_max = crop_x_max;
8860 }
8861 if (roi_y_max > crop_y_max) {
8862 roi_y_max = crop_y_max;
8863 }
8864 roi->rect.width = roi_x_max - roi->rect.left;
8865 roi->rect.height = roi_y_max - roi->rect.top;
8866 return true;
8867}
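/*
 * Illustrative example (hypothetical values): with a scaler crop region of
 * {left 0, top 0, width 2000, height 1500} and an roi of
 * {left 1900, top 100, width 300, height 300, weight 1}, the roi extends past
 * the crop's right edge, so it is clipped to {1900, 100, 100, 300} and the
 * function returns true. With weight 0 the roi is left untouched and the
 * function returns true immediately (weight 0 means the roi is disabled).
 */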
8868
8869/*===========================================================================
8870 * FUNCTION : convertLandmarks
8871 *
8872 * DESCRIPTION: helper method to extract the landmarks from face detection info
8873 *
8874 * PARAMETERS :
8875 * @landmark_data : input landmark data to be converted
8876 * @landmarks : int32_t destination array
8877 *
8878 *
8879 *==========================================================================*/
8880void QCamera3HardwareInterface::convertLandmarks(
8881 cam_face_landmarks_info_t landmark_data,
8882 int32_t *landmarks)
8883{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008884 if (landmark_data.is_left_eye_valid) {
8885 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8886 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8887 } else {
8888 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8889 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8890 }
8891
8892 if (landmark_data.is_right_eye_valid) {
8893 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8894 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8895 } else {
8896 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8897 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8898 }
8899
8900 if (landmark_data.is_mouth_valid) {
8901 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8902 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8903 } else {
8904 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8905 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8906 }
8907}
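/* The destination array is laid out as three (x, y) pairs --
 * LEFT_EYE_X/Y, RIGHT_EYE_X/Y, MOUTH_X/Y -- matching the per-face layout of
 * ANDROID_STATISTICS_FACE_LANDMARKS; points the backend marks invalid are
 * reported as FACE_INVALID_POINT rather than as stale coordinates.
 */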
8908
8909/*===========================================================================
8910 * FUNCTION : setInvalidLandmarks
8911 *
8912 * DESCRIPTION: helper method to set invalid landmarks
8913 *
8914 * PARAMETERS :
8915 * @landmarks : int32_t destination array
8916 *
8917 *
8918 *==========================================================================*/
8919void QCamera3HardwareInterface::setInvalidLandmarks(
8920 int32_t *landmarks)
8921{
8922 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8923 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8924 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8925 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8926 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8927 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008928}
8929
8930#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008931
8932/*===========================================================================
8933 * FUNCTION : getCapabilities
8934 *
8935 * DESCRIPTION: query camera capability from back-end
8936 *
8937 * PARAMETERS :
8938 * @ops : mm-interface ops structure
8939 * @cam_handle : camera handle for which we need capability
8940 *
8941 * RETURN : ptr type of capability structure
8942 * capability for success
8943 * NULL for failure
8944 *==========================================================================*/
8945cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8946 uint32_t cam_handle)
8947{
8948 int rc = NO_ERROR;
8949 QCamera3HeapMemory *capabilityHeap = NULL;
8950 cam_capability_t *cap_ptr = NULL;
8951
8952 if (ops == NULL) {
8953 LOGE("Invalid arguments");
8954 return NULL;
8955 }
8956
8957 capabilityHeap = new QCamera3HeapMemory(1);
8958 if (capabilityHeap == NULL) {
8959 LOGE("creation of capabilityHeap failed");
8960 return NULL;
8961 }
8962
8963 /* Allocate memory for capability buffer */
8964 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8965 if(rc != OK) {
8966 LOGE("No memory for cappability");
8967        LOGE("No memory for capability");
8968 }
8969
8970 /* Map memory for capability buffer */
8971 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8972
8973 rc = ops->map_buf(cam_handle,
8974 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8975 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8976 if(rc < 0) {
8977 LOGE("failed to map capability buffer");
8978 rc = FAILED_TRANSACTION;
8979 goto map_failed;
8980 }
8981
8982 /* Query Capability */
8983 rc = ops->query_capability(cam_handle);
8984 if(rc < 0) {
8985 LOGE("failed to query capability");
8986 rc = FAILED_TRANSACTION;
8987 goto query_failed;
8988 }
8989
8990 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8991 if (cap_ptr == NULL) {
8992 LOGE("out of memory");
8993 rc = NO_MEMORY;
8994 goto query_failed;
8995 }
8996
8997 memset(cap_ptr, 0, sizeof(cam_capability_t));
8998 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8999
9000 int index;
9001 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
9002 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
9003 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
9004 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
9005 }
9006
9007query_failed:
9008 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9009map_failed:
9010 capabilityHeap->deallocate();
9011allocate_failed:
9012 delete capabilityHeap;
9013
9014 if (rc != NO_ERROR) {
9015 return NULL;
9016 } else {
9017 return cap_ptr;
9018 }
9019}
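/* Minimal usage sketch, mirroring the call site in initCapabilities() below:
 *
 *   uint32_t handle = get_main_camera_handle(cameraHandle->camera_handle);
 *   cam_capability_t *caps = getCapabilities(cameraHandle->ops, handle);
 *   if (caps != NULL) {
 *       // ... use caps ...
 *       free(caps);  // the returned struct is heap-allocated; the caller owns it
 *   }
 */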
9020
Thierry Strudel3d639192016-09-09 11:52:26 -07009021/*===========================================================================
9022 * FUNCTION : initCapabilities
9023 *
9024 * DESCRIPTION: initialize camera capabilities in static data struct
9025 *
9026 * PARAMETERS :
9027 * @cameraId : camera Id
9028 *
9029 * RETURN : int32_t type of status
9030 * NO_ERROR -- success
9031 *              non-zero failure code
9032 *==========================================================================*/
9033int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9034{
9035 int rc = 0;
9036 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009037 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009038
9039 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9040 if (rc) {
9041 LOGE("camera_open failed. rc = %d", rc);
9042 goto open_failed;
9043 }
9044    if (!cameraHandle) {
9045        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
            rc = FAILED_TRANSACTION; // report the NULL handle as a failure instead of returning 0
9046        goto open_failed;
9047    }
9048
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009049 handle = get_main_camera_handle(cameraHandle->camera_handle);
9050 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9051 if (gCamCapability[cameraId] == NULL) {
9052 rc = FAILED_TRANSACTION;
9053 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009054 }
9055
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009056 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009057 if (is_dual_camera_by_idx(cameraId)) {
9058 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9059 gCamCapability[cameraId]->aux_cam_cap =
9060 getCapabilities(cameraHandle->ops, handle);
9061 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9062 rc = FAILED_TRANSACTION;
9063 free(gCamCapability[cameraId]);
9064 goto failed_op;
9065 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009066
9067 // Copy the main camera capability to main_cam_cap struct
9068 gCamCapability[cameraId]->main_cam_cap =
9069 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9070 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9071 LOGE("out of memory");
9072 rc = NO_MEMORY;
9073 goto failed_op;
9074 }
9075 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9076 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009077 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009078failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009079 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9080 cameraHandle = NULL;
9081open_failed:
9082 return rc;
9083}
9084
9085/*==========================================================================
9086 * FUNCTION   : get3AVersion
9087 *
9088 * DESCRIPTION: get the Q3A S/W version
9089 *
9090 * PARAMETERS :
9091 * @sw_version: Reference of Q3A structure which will hold version info upon
9092 * return
9093 *
9094 * RETURN : None
9095 *
9096 *==========================================================================*/
9097void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9098{
9099 if(gCamCapability[mCameraId])
9100 sw_version = gCamCapability[mCameraId]->q3a_version;
9101 else
9102 LOGE("Capability structure NULL!");
9103}
9104
9105
9106/*===========================================================================
9107 * FUNCTION : initParameters
9108 *
9109 * DESCRIPTION: initialize camera parameters
9110 *
9111 * PARAMETERS :
9112 *
9113 * RETURN : int32_t type of status
9114 * NO_ERROR -- success
9115 *              non-zero failure code
9116 *==========================================================================*/
9117int QCamera3HardwareInterface::initParameters()
9118{
9119 int rc = 0;
9120
9121 //Allocate Set Param Buffer
9122 mParamHeap = new QCamera3HeapMemory(1);
9123 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9124 if(rc != OK) {
9125 rc = NO_MEMORY;
9126 LOGE("Failed to allocate SETPARM Heap memory");
9127 delete mParamHeap;
9128 mParamHeap = NULL;
9129 return rc;
9130 }
9131
9132 //Map memory for parameters buffer
9133 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9134 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9135 mParamHeap->getFd(0),
9136 sizeof(metadata_buffer_t),
9137 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9138 if(rc < 0) {
9139 LOGE("failed to map SETPARM buffer");
9140 rc = FAILED_TRANSACTION;
9141 mParamHeap->deallocate();
9142 delete mParamHeap;
9143 mParamHeap = NULL;
9144 return rc;
9145 }
9146
9147 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9148
9149 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9150 return rc;
9151}
9152
9153/*===========================================================================
9154 * FUNCTION : deinitParameters
9155 *
9156 * DESCRIPTION: de-initialize camera parameters
9157 *
9158 * PARAMETERS :
9159 *
9160 * RETURN : NONE
9161 *==========================================================================*/
9162void QCamera3HardwareInterface::deinitParameters()
9163{
9164 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9165 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9166
9167 mParamHeap->deallocate();
9168 delete mParamHeap;
9169 mParamHeap = NULL;
9170
9171 mParameters = NULL;
9172
9173 free(mPrevParameters);
9174 mPrevParameters = NULL;
9175}
9176
9177/*===========================================================================
9178 * FUNCTION : calcMaxJpegSize
9179 *
9180 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9181 *
9182 * PARAMETERS :
9183 *   @camera_id : camera Id for which the maximum jpeg size is calculated
9184 * RETURN : max_jpeg_size
9185 *==========================================================================*/
9186size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9187{
9188 size_t max_jpeg_size = 0;
9189 size_t temp_width, temp_height;
9190 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9191 MAX_SIZES_CNT);
9192 for (size_t i = 0; i < count; i++) {
9193 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9194 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9195 if (temp_width * temp_height > max_jpeg_size ) {
9196 max_jpeg_size = temp_width * temp_height;
9197 }
9198 }
9199 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9200 return max_jpeg_size;
9201}
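/* Example: assuming a hypothetical 4000x3000 maximum picture size, the
 * worst-case buffer advertised here is 4000 * 3000 * 3 / 2 = 18000000 bytes
 * plus sizeof(camera3_jpeg_blob_t) for the JPEG blob header.
 */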
9202
9203/*===========================================================================
9204 * FUNCTION : getMaxRawSize
9205 *
9206 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9207 *
9208 * PARAMETERS :
9209 *   @camera_id : camera Id for which the largest raw dimension is fetched
9210 * RETURN : Largest supported Raw Dimension
9211 *==========================================================================*/
9212cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9213{
9214 int max_width = 0;
9215 cam_dimension_t maxRawSize;
9216
9217 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9218 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9219 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9220 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9221 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9222 }
9223 }
9224 return maxRawSize;
9225}
9226
9227
9228/*===========================================================================
9229 * FUNCTION : calcMaxJpegDim
9230 *
9231 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9232 *
9233 * PARAMETERS :
9234 *
9235 * RETURN : max_jpeg_dim
9236 *==========================================================================*/
9237cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9238{
9239 cam_dimension_t max_jpeg_dim;
9240 cam_dimension_t curr_jpeg_dim;
9241 max_jpeg_dim.width = 0;
9242 max_jpeg_dim.height = 0;
9243 curr_jpeg_dim.width = 0;
9244 curr_jpeg_dim.height = 0;
9245 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9246 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9247 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9248 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9249 max_jpeg_dim.width * max_jpeg_dim.height ) {
9250 max_jpeg_dim.width = curr_jpeg_dim.width;
9251 max_jpeg_dim.height = curr_jpeg_dim.height;
9252 }
9253 }
9254 return max_jpeg_dim;
9255}
9256
9257/*===========================================================================
9258 * FUNCTION : addStreamConfig
9259 *
9260 * DESCRIPTION: adds the stream configuration to the array
9261 *
9262 * PARAMETERS :
9263 * @available_stream_configs : pointer to stream configuration array
9264 * @scalar_format : scalar format
9265 * @dim : configuration dimension
9266 * @config_type : input or output configuration type
9267 *
9268 * RETURN : NONE
9269 *==========================================================================*/
9270void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9271 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9272{
9273 available_stream_configs.add(scalar_format);
9274 available_stream_configs.add(dim.width);
9275 available_stream_configs.add(dim.height);
9276 available_stream_configs.add(config_type);
9277}
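/* Each call appends one (format, width, height, direction) quadruple, which is
 * the flattened entry layout expected by
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. For example,
 *   addStreamConfig(cfgs, HAL_PIXEL_FORMAT_BLOB, {4000, 3000},
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)
 * adds a 4000x3000 JPEG output configuration (the dimension is hypothetical).
 */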
9278
9279/*===========================================================================
9280 * FUNCTION   : supportBurstCapture
9281 *
9282 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9283 *
9284 * PARAMETERS :
9285 * @cameraId : camera Id
9286 *
9287 * RETURN : true if camera supports BURST_CAPTURE
9288 * false otherwise
9289 *==========================================================================*/
9290bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9291{
9292 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9293 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9294 const int32_t highResWidth = 3264;
9295 const int32_t highResHeight = 2448;
9296
9297 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9298 // Maximum resolution images cannot be captured at >= 10fps
9299 // -> not supporting BURST_CAPTURE
9300 return false;
9301 }
9302
9303 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9304 // Maximum resolution images can be captured at >= 20fps
9305 // --> supporting BURST_CAPTURE
9306 return true;
9307 }
9308
9309 // Find the smallest highRes resolution, or largest resolution if there is none
9310 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9311 MAX_SIZES_CNT);
9312 size_t highRes = 0;
9313 while ((highRes + 1 < totalCnt) &&
9314 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9315 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9316 highResWidth * highResHeight)) {
9317 highRes++;
9318 }
9319 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9320 return true;
9321 } else {
9322 return false;
9323 }
9324}
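/* Decision sketch: with a picture_min_duration[0] of, say, 33333333 ns
 * (~30 fps), the 50 ms (20 fps) bound is met and BURST_CAPTURE is advertised
 * immediately. A sensor whose full-resolution minimum duration exceeds 100 ms
 * (under 10 fps) is rejected outright; anything in between is accepted only if
 * the smallest size of at least 3264x2448 (~8 MP) can still run at 20 fps or
 * faster.
 */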
9325
9326/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009327 * FUNCTION : getPDStatIndex
9328 *
9329 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9330 *
9331 * PARAMETERS :
9332 * @caps : camera capabilities
9333 *
9334 * RETURN : int32_t type
9335 * non-negative - on success
9336 * -1 - on failure
9337 *==========================================================================*/
9338int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9339 if (nullptr == caps) {
9340 return -1;
9341 }
9342
9343 uint32_t metaRawCount = caps->meta_raw_channel_count;
9344 int32_t ret = -1;
9345 for (size_t i = 0; i < metaRawCount; i++) {
9346 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9347 ret = i;
9348 break;
9349 }
9350 }
9351
9352 return ret;
9353}
9354
9355/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009356 * FUNCTION : initStaticMetadata
9357 *
9358 * DESCRIPTION: initialize the static metadata
9359 *
9360 * PARAMETERS :
9361 * @cameraId : camera Id
9362 *
9363 * RETURN : int32_t type of status
9364 * 0 -- success
9365 * non-zero failure code
9366 *==========================================================================*/
9367int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9368{
9369 int rc = 0;
9370 CameraMetadata staticInfo;
9371 size_t count = 0;
9372 bool limitedDevice = false;
9373 char prop[PROPERTY_VALUE_MAX];
9374 bool supportBurst = false;
9375
9376 supportBurst = supportBurstCapture(cameraId);
9377
9378    /* The device is advertised as a LIMITED device if the sensor is a YUV sensor
9379     * (no raw support), if per-frame control is not guaranteed, or if the min fps
9380     * at max resolution is less than 20 fps (i.e. burst capture is not supported) */
9381 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9382 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9383 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9384 !supportBurst;
9385
9386 uint8_t supportedHwLvl = limitedDevice ?
9387 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009388#ifndef USE_HAL_3_3
9389 // LEVEL_3 - This device will support level 3.
9390 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9391#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009393#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009394
9395 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9396 &supportedHwLvl, 1);
9397
9398 bool facingBack = false;
9399 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9400 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9401 facingBack = true;
9402 }
9403 /*HAL 3 only*/
9404 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9405 &gCamCapability[cameraId]->min_focus_distance, 1);
9406
9407 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9408 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9409
9410 /*should be using focal lengths but sensor doesn't provide that info now*/
9411 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9412 &gCamCapability[cameraId]->focal_length,
9413 1);
9414
9415 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9416 gCamCapability[cameraId]->apertures,
9417 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9418
9419 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9420 gCamCapability[cameraId]->filter_densities,
9421 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9422
9423
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009424 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9425 size_t mode_count =
9426 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9427 for (size_t i = 0; i < mode_count; i++) {
9428 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9429 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009430 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009431 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009432
9433 int32_t lens_shading_map_size[] = {
9434 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9435 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9436 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9437 lens_shading_map_size,
9438 sizeof(lens_shading_map_size)/sizeof(int32_t));
9439
9440 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9441 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9442
9443 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9444 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9445
9446 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9447 &gCamCapability[cameraId]->max_frame_duration, 1);
9448
9449 camera_metadata_rational baseGainFactor = {
9450 gCamCapability[cameraId]->base_gain_factor.numerator,
9451 gCamCapability[cameraId]->base_gain_factor.denominator};
9452 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9453 &baseGainFactor, 1);
9454
9455 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9456 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9457
9458 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9459 gCamCapability[cameraId]->pixel_array_size.height};
9460 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9461 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9462
9463 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9464 gCamCapability[cameraId]->active_array_size.top,
9465 gCamCapability[cameraId]->active_array_size.width,
9466 gCamCapability[cameraId]->active_array_size.height};
9467 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9468 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9469
9470 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9471 &gCamCapability[cameraId]->white_level, 1);
9472
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009473 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9474 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9475 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009476 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009477 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009478
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009479#ifndef USE_HAL_3_3
9480 bool hasBlackRegions = false;
9481 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9482 LOGW("black_region_count: %d is bounded to %d",
9483 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9484 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9485 }
9486 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9487 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9488 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9489 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9490 }
9491 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9492 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9493 hasBlackRegions = true;
9494 }
9495#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009496 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9497 &gCamCapability[cameraId]->flash_charge_duration, 1);
9498
9499 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9500 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9501
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009502 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9503 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9504 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009505 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9506 &timestampSource, 1);
9507
Thierry Strudel54dc9782017-02-15 12:12:10 -08009508 //update histogram vendor data
9509 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009510 &gCamCapability[cameraId]->histogram_size, 1);
9511
Thierry Strudel54dc9782017-02-15 12:12:10 -08009512 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009513 &gCamCapability[cameraId]->max_histogram_count, 1);
9514
Shuzhen Wang14415f52016-11-16 18:26:18 -08009515 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9516    //so that the app can request fewer bins than the maximum supported.
9517 std::vector<int32_t> histBins;
9518 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9519 histBins.push_back(maxHistBins);
9520 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9521 (maxHistBins & 0x1) == 0) {
9522 histBins.push_back(maxHistBins >> 1);
9523 maxHistBins >>= 1;
9524 }
9525 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9526 histBins.data(), histBins.size());
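    /* For example, assuming max_histogram_count is 256 and
     * MIN_CAM_HISTOGRAM_STATS_SIZE is 32, the advertised list would be
     * {256, 128, 64, 32}; the halving stops at the first odd value or once the
     * next size would drop below the minimum.
     */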
9527
Thierry Strudel3d639192016-09-09 11:52:26 -07009528 int32_t sharpness_map_size[] = {
9529 gCamCapability[cameraId]->sharpness_map_size.width,
9530 gCamCapability[cameraId]->sharpness_map_size.height};
9531
9532 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9533 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9534
9535 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9536 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9537
Emilian Peev0f3c3162017-03-15 12:57:46 +00009538 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9539 if (0 <= indexPD) {
9540 // Advertise PD stats data as part of the Depth capabilities
9541 int32_t depthWidth =
9542 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9543 int32_t depthHeight =
9544 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009545 int32_t depthStride =
9546 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009547 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9548 assert(0 < depthSamplesCount);
9549 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9550 &depthSamplesCount, 1);
9551
9552 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9553 depthHeight,
9554 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9555 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9556 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9557 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9558 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9559
9560 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9561 depthHeight, 33333333,
9562 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9563 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9564 depthMinDuration,
9565 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9566
9567 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9568 depthHeight, 0,
9569 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9570 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9571 depthStallDuration,
9572 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9573
9574 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9575 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009576
9577 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9578 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9579 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev835938b2017-08-31 16:59:54 +01009580
9581 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9582 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9583 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9584
9585 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9586 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9587 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9588
9589 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9590 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9591 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009592 }
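    /* The sample count is derived directly from the PD stats dimensions: for a
     * hypothetical 640x480 PDAF stats buffer, depthSamplesCount would be
     * (640 * 480 * 2) / 16 = 38400, and the same buffer is exposed both as a
     * RAW16 stream and as a BLOB configuration of that many samples.
     */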
9593
Thierry Strudel3d639192016-09-09 11:52:26 -07009594 int32_t scalar_formats[] = {
9595 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9596 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9597 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9598 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9599 HAL_PIXEL_FORMAT_RAW10,
9600 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009601 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9602 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9603 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009604
9605 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9606 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9607 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9608 count, MAX_SIZES_CNT, available_processed_sizes);
9609 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9610 available_processed_sizes, count * 2);
9611
9612 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9613 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9614 makeTable(gCamCapability[cameraId]->raw_dim,
9615 count, MAX_SIZES_CNT, available_raw_sizes);
9616 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9617 available_raw_sizes, count * 2);
9618
9619 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9620 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9621 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9622 count, MAX_SIZES_CNT, available_fps_ranges);
9623 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9624 available_fps_ranges, count * 2);
9625
9626 camera_metadata_rational exposureCompensationStep = {
9627 gCamCapability[cameraId]->exp_compensation_step.numerator,
9628 gCamCapability[cameraId]->exp_compensation_step.denominator};
9629 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9630 &exposureCompensationStep, 1);
9631
9632 Vector<uint8_t> availableVstabModes;
9633 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9634 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009635 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009636 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009637 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009638 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009639 count = IS_TYPE_MAX;
9640 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9641 for (size_t i = 0; i < count; i++) {
9642 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9643 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9644 eisSupported = true;
9645 break;
9646 }
9647 }
9648 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009649 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9650 }
9651 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9652 availableVstabModes.array(), availableVstabModes.size());
9653
9654 /*HAL 1 and HAL 3 common*/
9655 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9656 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9657 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009658 // Cap the max zoom to the max preferred value
9659 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009660 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9661 &maxZoom, 1);
9662
9663 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9664 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9665
9666 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9667 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9668 max3aRegions[2] = 0; /* AF not supported */
9669 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9670 max3aRegions, 3);
9671
9672 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9673 memset(prop, 0, sizeof(prop));
9674 property_get("persist.camera.facedetect", prop, "1");
9675 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9676 LOGD("Support face detection mode: %d",
9677 supportedFaceDetectMode);
9678
9679 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9680    /* the supported mode should be OFF if the max number of faces is 0 */
9681 if (maxFaces <= 0) {
9682 supportedFaceDetectMode = 0;
9683 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 Vector<uint8_t> availableFaceDetectModes;
9685 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9686 if (supportedFaceDetectMode == 1) {
9687 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9688 } else if (supportedFaceDetectMode == 2) {
9689 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9690 } else if (supportedFaceDetectMode == 3) {
9691 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9692 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9693 } else {
9694 maxFaces = 0;
9695 }
9696 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9697 availableFaceDetectModes.array(),
9698 availableFaceDetectModes.size());
9699 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9700 (int32_t *)&maxFaces, 1);
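    /* With the default persist.camera.facedetect value of 1, the advertised
     * modes are OFF and SIMPLE; 2 advertises OFF and FULL, 3 advertises all
     * three, and any other value (or a zero max_num_roi) leaves only OFF and
     * forces the reported max face count to 0.
     */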
Thierry Strudel54dc9782017-02-15 12:12:10 -08009701 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9702 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9703 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009704
9705 int32_t exposureCompensationRange[] = {
9706 gCamCapability[cameraId]->exposure_compensation_min,
9707 gCamCapability[cameraId]->exposure_compensation_max};
9708 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9709 exposureCompensationRange,
9710 sizeof(exposureCompensationRange)/sizeof(int32_t));
9711
9712 uint8_t lensFacing = (facingBack) ?
9713 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9714 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9715
9716 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9717 available_thumbnail_sizes,
9718 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9719
9720    /*all sizes will be combined into this tag*/
9721 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9722 /*android.scaler.availableStreamConfigurations*/
9723 Vector<int32_t> available_stream_configs;
9724 cam_dimension_t active_array_dim;
9725 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9726 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009727
9728    /*advertise the list of supported input dimensions based on the property below.
9729      By default all sizes up to 5MP will be advertised.
9730      Note that the setprop resolution format should be WxH,
9731      e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9732      To list all supported sizes, set the property to "0x0" */
9733 cam_dimension_t minInputSize = {2592,1944}; //5MP
9734 memset(prop, 0, sizeof(prop));
9735 property_get("persist.camera.input.minsize", prop, "2592x1944");
9736 if (strlen(prop) > 0) {
9737 char *saveptr = NULL;
9738 char *token = strtok_r(prop, "x", &saveptr);
9739 if (token != NULL) {
9740 minInputSize.width = atoi(token);
9741 }
9742 token = strtok_r(NULL, "x", &saveptr);
9743 if (token != NULL) {
9744 minInputSize.height = atoi(token);
9745 }
9746 }
9747
Thierry Strudel3d639192016-09-09 11:52:26 -07009748 /* Add input/output stream configurations for each scalar formats*/
9749 for (size_t j = 0; j < scalar_formats_count; j++) {
9750 switch (scalar_formats[j]) {
9751 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9752 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9753 case HAL_PIXEL_FORMAT_RAW10:
9754 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9755 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9756 addStreamConfig(available_stream_configs, scalar_formats[j],
9757 gCamCapability[cameraId]->raw_dim[i],
9758 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9759 }
9760 break;
9761 case HAL_PIXEL_FORMAT_BLOB:
9762 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9763 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9764 addStreamConfig(available_stream_configs, scalar_formats[j],
9765 gCamCapability[cameraId]->picture_sizes_tbl[i],
9766 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9767 }
9768 break;
9769 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9770 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9771 default:
9772 cam_dimension_t largest_picture_size;
9773 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9774 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9775 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9776 addStreamConfig(available_stream_configs, scalar_formats[j],
9777 gCamCapability[cameraId]->picture_sizes_tbl[i],
9778 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9779                /* For the 2 formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009780 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9781 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009782 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9783 >= minInputSize.width) || (gCamCapability[cameraId]->
9784 picture_sizes_tbl[i].height >= minInputSize.height)) {
9785 addStreamConfig(available_stream_configs, scalar_formats[j],
9786 gCamCapability[cameraId]->picture_sizes_tbl[i],
9787 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9788 }
9789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009790 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009791
Thierry Strudel3d639192016-09-09 11:52:26 -07009792 break;
9793 }
9794 }
9795
9796 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9797 available_stream_configs.array(), available_stream_configs.size());
9798 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9799 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9800
9801 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9802 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9803
9804 /* android.scaler.availableMinFrameDurations */
9805 Vector<int64_t> available_min_durations;
9806 for (size_t j = 0; j < scalar_formats_count; j++) {
9807 switch (scalar_formats[j]) {
9808 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9809 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9810 case HAL_PIXEL_FORMAT_RAW10:
9811 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9812 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9813 available_min_durations.add(scalar_formats[j]);
9814 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9815 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9816 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9817 }
9818 break;
9819 default:
9820 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9821 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9822 available_min_durations.add(scalar_formats[j]);
9823 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9824 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9825 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9826 }
9827 break;
9828 }
9829 }
9830 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9831 available_min_durations.array(), available_min_durations.size());
9832
9833 Vector<int32_t> available_hfr_configs;
9834 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9835 int32_t fps = 0;
9836 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9837 case CAM_HFR_MODE_60FPS:
9838 fps = 60;
9839 break;
9840 case CAM_HFR_MODE_90FPS:
9841 fps = 90;
9842 break;
9843 case CAM_HFR_MODE_120FPS:
9844 fps = 120;
9845 break;
9846 case CAM_HFR_MODE_150FPS:
9847 fps = 150;
9848 break;
9849 case CAM_HFR_MODE_180FPS:
9850 fps = 180;
9851 break;
9852 case CAM_HFR_MODE_210FPS:
9853 fps = 210;
9854 break;
9855 case CAM_HFR_MODE_240FPS:
9856 fps = 240;
9857 break;
9858 case CAM_HFR_MODE_480FPS:
9859 fps = 480;
9860 break;
9861 case CAM_HFR_MODE_OFF:
9862 case CAM_HFR_MODE_MAX:
9863 default:
9864 break;
9865 }
9866
9867 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9868 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9869 /* For each HFR frame rate, need to advertise one variable fps range
9870 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9871 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9872 * set by the app. When video recording is started, [120, 120] is
9873 * set. This way sensor configuration does not change when recording
9874 * is started */
9875
9876 /* (width, height, fps_min, fps_max, batch_size_max) */
9877 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9878 j < MAX_SIZES_CNT; j++) {
9879 available_hfr_configs.add(
9880 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9881 available_hfr_configs.add(
9882 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9883 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9884 available_hfr_configs.add(fps);
9885 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9886
9887 /* (width, height, fps_min, fps_max, batch_size_max) */
9888 available_hfr_configs.add(
9889 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9890 available_hfr_configs.add(
9891 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9892 available_hfr_configs.add(fps);
9893 available_hfr_configs.add(fps);
9894 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9895 }
9896 }
9897 }
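    /* Example: assuming PREVIEW_FPS_FOR_HFR is 30, a 1920x1080 entry in the
     * 120 fps HFR table produces two tuples of
     * (width, height, fps_min, fps_max, batch_size_max):
     * (1920, 1080, 30, 120, 4) for camcorder preview and
     * (1920, 1080, 120, 120, 4) once recording starts, so the sensor
     * configuration does not change between the two.
     */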
9898 //Advertise HFR capability only if the property is set
9899 memset(prop, 0, sizeof(prop));
9900 property_get("persist.camera.hal3hfr.enable", prop, "1");
9901 uint8_t hfrEnable = (uint8_t)atoi(prop);
9902
9903 if(hfrEnable && available_hfr_configs.array()) {
9904 staticInfo.update(
9905 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9906 available_hfr_configs.array(), available_hfr_configs.size());
9907 }
9908
9909 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9910 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9911 &max_jpeg_size, 1);
9912
9913 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9914 size_t size = 0;
9915 count = CAM_EFFECT_MODE_MAX;
9916 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9917 for (size_t i = 0; i < count; i++) {
9918 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9919 gCamCapability[cameraId]->supported_effects[i]);
9920 if (NAME_NOT_FOUND != val) {
9921 avail_effects[size] = (uint8_t)val;
9922 size++;
9923 }
9924 }
9925 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9926 avail_effects,
9927 size);
9928
9929 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9930 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9931 size_t supported_scene_modes_cnt = 0;
9932 count = CAM_SCENE_MODE_MAX;
9933 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9934 for (size_t i = 0; i < count; i++) {
9935 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9936 CAM_SCENE_MODE_OFF) {
9937 int val = lookupFwkName(SCENE_MODES_MAP,
9938 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9939 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009940
Thierry Strudel3d639192016-09-09 11:52:26 -07009941 if (NAME_NOT_FOUND != val) {
9942 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9943 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9944 supported_scene_modes_cnt++;
9945 }
9946 }
9947 }
9948 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9949 avail_scene_modes,
9950 supported_scene_modes_cnt);
9951
9952 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9953 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9954 supported_scene_modes_cnt,
9955 CAM_SCENE_MODE_MAX,
9956 scene_mode_overrides,
9957 supported_indexes,
9958 cameraId);
9959
9960 if (supported_scene_modes_cnt == 0) {
9961 supported_scene_modes_cnt = 1;
9962 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9963 }
9964
9965 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9966 scene_mode_overrides, supported_scene_modes_cnt * 3);
9967
9968 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9969 ANDROID_CONTROL_MODE_AUTO,
9970 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9971 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9972 available_control_modes,
9973 3);
9974
9975 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9976 size = 0;
9977 count = CAM_ANTIBANDING_MODE_MAX;
9978 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9979 for (size_t i = 0; i < count; i++) {
9980 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9981 gCamCapability[cameraId]->supported_antibandings[i]);
9982 if (NAME_NOT_FOUND != val) {
9983 avail_antibanding_modes[size] = (uint8_t)val;
9984 size++;
9985 }
9986
9987 }
9988 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9989 avail_antibanding_modes,
9990 size);
9991
9992 uint8_t avail_abberation_modes[] = {
9993 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9994 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9995 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9996 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9997 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9998 if (0 == count) {
9999        // If no aberration correction modes are available for a device, advertise only the OFF mode
10000 size = 1;
10001 } else {
10002        // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
10003        // So, advertise all 3 modes if at least one mode is supported, as per the
10004        // Android M requirement
10005 size = 3;
10006 }
10007 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10008 avail_abberation_modes,
10009 size);
10010
10011 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10012 size = 0;
10013 count = CAM_FOCUS_MODE_MAX;
10014 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10015 for (size_t i = 0; i < count; i++) {
10016 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10017 gCamCapability[cameraId]->supported_focus_modes[i]);
10018 if (NAME_NOT_FOUND != val) {
10019 avail_af_modes[size] = (uint8_t)val;
10020 size++;
10021 }
10022 }
10023 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10024 avail_af_modes,
10025 size);
10026
10027 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10028 size = 0;
10029 count = CAM_WB_MODE_MAX;
10030 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10031 for (size_t i = 0; i < count; i++) {
10032 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10033 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10034 gCamCapability[cameraId]->supported_white_balances[i]);
10035 if (NAME_NOT_FOUND != val) {
10036 avail_awb_modes[size] = (uint8_t)val;
10037 size++;
10038 }
10039 }
10040 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10041 avail_awb_modes,
10042 size);
10043
10044 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10045 count = CAM_FLASH_FIRING_LEVEL_MAX;
10046 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10047 count);
10048 for (size_t i = 0; i < count; i++) {
10049 available_flash_levels[i] =
10050 gCamCapability[cameraId]->supported_firing_levels[i];
10051 }
10052 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10053 available_flash_levels, count);
10054
10055 uint8_t flashAvailable;
10056 if (gCamCapability[cameraId]->flash_available)
10057 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10058 else
10059 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10060 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10061 &flashAvailable, 1);
10062
10063 Vector<uint8_t> avail_ae_modes;
10064 count = CAM_AE_MODE_MAX;
10065 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10066 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010067 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10068 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10069 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10070 }
10071 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010072 }
10073 if (flashAvailable) {
10074 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10075 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10076 }
10077 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10078 avail_ae_modes.array(),
10079 avail_ae_modes.size());
10080
10081 int32_t sensitivity_range[2];
10082 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10083 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10084 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10085 sensitivity_range,
10086 sizeof(sensitivity_range) / sizeof(int32_t));
10087
10088 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10089 &gCamCapability[cameraId]->max_analog_sensitivity,
10090 1);
10091
10092 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10093 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10094 &sensor_orientation,
10095 1);
10096
10097 int32_t max_output_streams[] = {
10098 MAX_STALLING_STREAMS,
10099 MAX_PROCESSED_STREAMS,
10100 MAX_RAW_STREAMS};
10101 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10102 max_output_streams,
10103 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10104
10105 uint8_t avail_leds = 0;
10106 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10107 &avail_leds, 0);
10108
10109 uint8_t focus_dist_calibrated;
10110 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10111 gCamCapability[cameraId]->focus_dist_calibrated);
10112 if (NAME_NOT_FOUND != val) {
10113 focus_dist_calibrated = (uint8_t)val;
10114 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10115 &focus_dist_calibrated, 1);
10116 }
10117
10118 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10119 size = 0;
10120 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10121 MAX_TEST_PATTERN_CNT);
10122 for (size_t i = 0; i < count; i++) {
10123 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10124 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10125 if (NAME_NOT_FOUND != testpatternMode) {
10126 avail_testpattern_modes[size] = testpatternMode;
10127 size++;
10128 }
10129 }
10130 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10131 avail_testpattern_modes,
10132 size);
10133
10134 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10135 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10136 &max_pipeline_depth,
10137 1);
10138
10139 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10140 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10141 &partial_result_count,
10142 1);
10143
10144 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10145 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10146
10147 Vector<uint8_t> available_capabilities;
10148 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10149 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10150 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10151 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10152 if (supportBurst) {
10153 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10154 }
10155 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10156 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10157 if (hfrEnable && available_hfr_configs.array()) {
10158 available_capabilities.add(
10159 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10160 }
10161
10162 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10163 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10164 }
10165 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10166 available_capabilities.array(),
10167 available_capabilities.size());
10168
10169    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10170    //The assumption is that all bayer cameras support MANUAL_SENSOR.
10171 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10172 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10173
10174 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10175 &aeLockAvailable, 1);
10176
10177    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10178    //BURST_CAPTURE. The assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10179 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10180 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10181
10182 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10183 &awbLockAvailable, 1);
10184
10185 int32_t max_input_streams = 1;
10186 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10187 &max_input_streams,
10188 1);
10189
10190 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10191 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10192 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10193 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10194 HAL_PIXEL_FORMAT_YCbCr_420_888};
10195 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10196 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
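    /* Decoded, the map above advertises two reprocessing paths: an
     * IMPLEMENTATION_DEFINED (opaque/private) input that can produce BLOB or
     * YCbCr_420_888 outputs, and a YCbCr_420_888 input that can likewise
     * produce BLOB or YCbCr_420_888 outputs, matching the PRIVATE_REPROCESSING
     * and YUV_REPROCESSING capabilities advertised earlier.
     */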
10197
10198 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10199 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10200 &max_latency,
10201 1);
10202
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010203#ifndef USE_HAL_3_3
10204 int32_t isp_sensitivity_range[2];
10205 isp_sensitivity_range[0] =
10206 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10207 isp_sensitivity_range[1] =
10208 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10209 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10210 isp_sensitivity_range,
10211 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10212#endif
10213
Thierry Strudel3d639192016-09-09 11:52:26 -070010214 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10215 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10216 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10217 available_hot_pixel_modes,
10218 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10219
10220 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10221 ANDROID_SHADING_MODE_FAST,
10222 ANDROID_SHADING_MODE_HIGH_QUALITY};
10223 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10224 available_shading_modes,
10225 3);
10226
10227 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10228 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10229 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10230 available_lens_shading_map_modes,
10231 2);
10232
10233 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10234 ANDROID_EDGE_MODE_FAST,
10235 ANDROID_EDGE_MODE_HIGH_QUALITY,
10236 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10237 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10238 available_edge_modes,
10239 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10240
10241 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10242 ANDROID_NOISE_REDUCTION_MODE_FAST,
10243 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10244 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10245 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10246 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10247 available_noise_red_modes,
10248 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10249
10250 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10251 ANDROID_TONEMAP_MODE_FAST,
10252 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10253 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10254 available_tonemap_modes,
10255 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10256
10257 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10258 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10259 available_hot_pixel_map_modes,
10260 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10261
10262 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10263 gCamCapability[cameraId]->reference_illuminant1);
10264 if (NAME_NOT_FOUND != val) {
10265 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10266 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10267 }
10268
10269 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10270 gCamCapability[cameraId]->reference_illuminant2);
10271 if (NAME_NOT_FOUND != val) {
10272 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10273 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10274 }
10275
10276 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10277 (void *)gCamCapability[cameraId]->forward_matrix1,
10278 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10279
10280 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10281 (void *)gCamCapability[cameraId]->forward_matrix2,
10282 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10283
10284 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10285 (void *)gCamCapability[cameraId]->color_transform1,
10286 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10287
10288 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10289 (void *)gCamCapability[cameraId]->color_transform2,
10290 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10291
10292 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10293 (void *)gCamCapability[cameraId]->calibration_transform1,
10294 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10295
10296 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10297 (void *)gCamCapability[cameraId]->calibration_transform2,
10298 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10299
10300 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10301 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10302 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10303 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10304 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10305 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10306 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10307 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10308 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10309 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10310 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10311 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10312 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10313 ANDROID_JPEG_GPS_COORDINATES,
10314 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10315 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10316 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10317 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10318 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10319 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10320 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10321 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10322 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10323 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010324#ifndef USE_HAL_3_3
10325 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10326#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010327 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010328 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010329 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10330 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010331 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010332 /* DevCamDebug metadata request_keys_basic */
10333 DEVCAMDEBUG_META_ENABLE,
10334 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010335 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010336 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010337 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010338 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010339 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010340 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010341
10342 size_t request_keys_cnt =
10343 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10344 Vector<int32_t> available_request_keys;
10345 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10346 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10347 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10348 }
10349
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010350 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010351 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010352 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010353 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010354 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010355 }
10356
Thierry Strudel3d639192016-09-09 11:52:26 -070010357 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10358 available_request_keys.array(), available_request_keys.size());
10359
10360 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10361 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10362 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10363 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10364 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10365 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10366 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10367 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10368 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10369 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10370 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10371 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10372 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10373 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10374 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10375 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10376 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010377 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010378 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10379 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10380 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010381 ANDROID_STATISTICS_FACE_SCORES,
10382#ifndef USE_HAL_3_3
10383 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10384#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010385 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010386 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010387 // DevCamDebug metadata result_keys_basic
10388 DEVCAMDEBUG_META_ENABLE,
10389 // DevCamDebug metadata result_keys AF
10390 DEVCAMDEBUG_AF_LENS_POSITION,
10391 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10392 DEVCAMDEBUG_AF_TOF_DISTANCE,
10393 DEVCAMDEBUG_AF_LUMA,
10394 DEVCAMDEBUG_AF_HAF_STATE,
10395 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10396 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10397 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10398 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10399 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10400 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10401 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10402 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10403 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10404 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10405 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10406 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10407 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10408 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10409 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10410 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10411 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10412 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10413 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10414 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10415 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10416 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10417 // DevCamDebug metadata result_keys AEC
10418 DEVCAMDEBUG_AEC_TARGET_LUMA,
10419 DEVCAMDEBUG_AEC_COMP_LUMA,
10420 DEVCAMDEBUG_AEC_AVG_LUMA,
10421 DEVCAMDEBUG_AEC_CUR_LUMA,
10422 DEVCAMDEBUG_AEC_LINECOUNT,
10423 DEVCAMDEBUG_AEC_REAL_GAIN,
10424 DEVCAMDEBUG_AEC_EXP_INDEX,
10425 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010426 // DevCamDebug metadata result_keys zzHDR
10427 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10428 DEVCAMDEBUG_AEC_L_LINECOUNT,
10429 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10430 DEVCAMDEBUG_AEC_S_LINECOUNT,
10431 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10432 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10433 // DevCamDebug metadata result_keys ADRC
10434 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10435 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10436 DEVCAMDEBUG_AEC_GTM_RATIO,
10437 DEVCAMDEBUG_AEC_LTM_RATIO,
10438 DEVCAMDEBUG_AEC_LA_RATIO,
10439 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010440 // DevCamDebug metadata result_keys AEC MOTION
10441 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10442 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10443 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010444 // DevCamDebug metadata result_keys AWB
10445 DEVCAMDEBUG_AWB_R_GAIN,
10446 DEVCAMDEBUG_AWB_G_GAIN,
10447 DEVCAMDEBUG_AWB_B_GAIN,
10448 DEVCAMDEBUG_AWB_CCT,
10449 DEVCAMDEBUG_AWB_DECISION,
10450 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010451 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10452 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10453 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010454 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010455 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010456 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010457 };
10458
Thierry Strudel3d639192016-09-09 11:52:26 -070010459 size_t result_keys_cnt =
10460 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10461
10462 Vector<int32_t> available_result_keys;
10463 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10464 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10465 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10466 }
10467 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10468 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10469 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10470 }
10471 if (supportedFaceDetectMode == 1) {
10472 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10473 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10474 } else if ((supportedFaceDetectMode == 2) ||
10475 (supportedFaceDetectMode == 3)) {
10476 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10477 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10478 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010479#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010480 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010481 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10482 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10483 }
10484#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010485
10486 if (gExposeEnableZslKey) {
10487 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010488 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010489 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10490 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010491 }
10492
Thierry Strudel3d639192016-09-09 11:52:26 -070010493 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10494 available_result_keys.array(), available_result_keys.size());
10495
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010496 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010497 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10498 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10499 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10500 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10501 ANDROID_SCALER_CROPPING_TYPE,
10502 ANDROID_SYNC_MAX_LATENCY,
10503 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10504 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10505 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10506 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10507 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10508 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10509 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10510 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10511 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10512 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10513 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10514 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10515 ANDROID_LENS_FACING,
10516 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10517 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10518 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10519 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10520 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10521 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10522 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10523 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10524 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10525 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10526 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10527 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10528 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10529 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10530 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10531 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10532 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10533 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10534 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10535 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010536 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010537 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10538 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10539 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10540 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10541 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10542 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10543 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10544 ANDROID_CONTROL_AVAILABLE_MODES,
10545 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10546 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10547 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10548 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010549 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10550#ifndef USE_HAL_3_3
10551 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10552 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10553#endif
10554 };
10555
10556 Vector<int32_t> available_characteristics_keys;
10557 available_characteristics_keys.appendArray(characteristics_keys_basic,
10558 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10559#ifndef USE_HAL_3_3
10560 if (hasBlackRegions) {
10561 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10562 }
10563#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010564
10565 if (0 <= indexPD) {
10566 int32_t depthKeys[] = {
10567 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10568 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10569 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10570 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10571 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10572 };
10573 available_characteristics_keys.appendArray(depthKeys,
10574 sizeof(depthKeys) / sizeof(depthKeys[0]));
10575 }
10576
Thierry Strudel3d639192016-09-09 11:52:26 -070010577 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010578 available_characteristics_keys.array(),
10579 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010580
10581 /*available stall durations depend on the hw + sw and will be different for different devices */
10582 /*have to add for raw after implementation*/
10583 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10584 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10585
10586 Vector<int64_t> available_stall_durations;
10587 for (uint32_t j = 0; j < stall_formats_count; j++) {
10588 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10589 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10590 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10591 available_stall_durations.add(stall_formats[j]);
10592 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10593 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10594 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10595 }
10596 } else {
10597 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10598 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10599 available_stall_durations.add(stall_formats[j]);
10600 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10601 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10602 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10603 }
10604 }
10605 }
10606 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10607 available_stall_durations.array(),
10608 available_stall_durations.size());
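    // Each entry pushed above is a 4-tuple of int64_t values:
    // (pixel format, width, height, stall duration in ns), which is the layout
    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS expects.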
10609
10610 //QCAMERA3_OPAQUE_RAW
10611 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10612 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10613 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10614 case LEGACY_RAW:
10615 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10616 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10617 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10618 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10619 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10620 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10621 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10622 break;
10623 case MIPI_RAW:
10624 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10625 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10626 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10627 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10628 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10629 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10630 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10631 break;
10632 default:
10633 LOGE("unknown opaque_raw_format %d",
10634 gCamCapability[cameraId]->opaque_raw_fmt);
10635 break;
10636 }
10637 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10638
10639 Vector<int32_t> strides;
10640 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10641 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10642 cam_stream_buf_plane_info_t buf_planes;
10643 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10644 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10645 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10646 &gCamCapability[cameraId]->padding_info, &buf_planes);
10647 strides.add(buf_planes.plane_info.mp[0].stride);
10648 }
10649 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10650 strides.size());
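    // Each QCAMERA3_OPAQUE_RAW_STRIDES entry added above is a (width, height, stride)
    // triplet, one per supported raw dimension, with the stride taken from the plane
    // info computed by mm_stream_calc_offset_raw().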
10651
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010652 //TBD: remove the following line once backend advertises zzHDR in feature mask
10653 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010654 //Video HDR default
10655 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10656 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010657 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010658 int32_t vhdr_mode[] = {
10659 QCAMERA3_VIDEO_HDR_MODE_OFF,
10660 QCAMERA3_VIDEO_HDR_MODE_ON};
10661
10662 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10663 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10664 vhdr_mode, vhdr_mode_count);
10665 }
10666
Thierry Strudel3d639192016-09-09 11:52:26 -070010667 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10668 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10669 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10670
10671 uint8_t isMonoOnly =
10672 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10673 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10674 &isMonoOnly, 1);
10675
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010676#ifndef USE_HAL_3_3
10677 Vector<int32_t> opaque_size;
10678 for (size_t j = 0; j < scalar_formats_count; j++) {
10679 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10680 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10681 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10682 cam_stream_buf_plane_info_t buf_planes;
10683
10684 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10685 &gCamCapability[cameraId]->padding_info, &buf_planes);
10686
10687 if (rc == 0) {
10688 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10689 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10690 opaque_size.add(buf_planes.plane_info.frame_len);
10691                } else {
10692 LOGE("raw frame calculation failed!");
10693 }
10694 }
10695 }
10696 }
10697
10698 if ((opaque_size.size() > 0) &&
10699 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10700 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10701 else
10702        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10703#endif
10704
Thierry Strudel04e026f2016-10-10 11:27:36 -070010705 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10706 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10707 size = 0;
10708 count = CAM_IR_MODE_MAX;
10709 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10710 for (size_t i = 0; i < count; i++) {
10711 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10712 gCamCapability[cameraId]->supported_ir_modes[i]);
10713 if (NAME_NOT_FOUND != val) {
10714 avail_ir_modes[size] = (int32_t)val;
10715 size++;
10716 }
10717 }
10718 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10719 avail_ir_modes, size);
10720 }
10721
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010722 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10723 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10724 size = 0;
10725 count = CAM_AEC_CONVERGENCE_MAX;
10726 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10727 for (size_t i = 0; i < count; i++) {
10728 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10729 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10730 if (NAME_NOT_FOUND != val) {
10731 available_instant_aec_modes[size] = (int32_t)val;
10732 size++;
10733 }
10734 }
10735 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10736 available_instant_aec_modes, size);
10737 }
10738
Thierry Strudel54dc9782017-02-15 12:12:10 -080010739 int32_t sharpness_range[] = {
10740 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10741 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10742 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10743
10744 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10745 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10746 size = 0;
10747 count = CAM_BINNING_CORRECTION_MODE_MAX;
10748 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10749 for (size_t i = 0; i < count; i++) {
10750 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10751 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10752 gCamCapability[cameraId]->supported_binning_modes[i]);
10753 if (NAME_NOT_FOUND != val) {
10754 avail_binning_modes[size] = (int32_t)val;
10755 size++;
10756 }
10757 }
10758 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10759 avail_binning_modes, size);
10760 }
10761
10762 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10763 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10764 size = 0;
10765 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10766 for (size_t i = 0; i < count; i++) {
10767 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10768 gCamCapability[cameraId]->supported_aec_modes[i]);
10769 if (NAME_NOT_FOUND != val)
10770 available_aec_modes[size++] = val;
10771 }
10772 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10773 available_aec_modes, size);
10774 }
10775
10776 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10777 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10778 size = 0;
10779 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10780 for (size_t i = 0; i < count; i++) {
10781 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10782 gCamCapability[cameraId]->supported_iso_modes[i]);
10783 if (NAME_NOT_FOUND != val)
10784 available_iso_modes[size++] = val;
10785 }
10786 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10787 available_iso_modes, size);
10788 }
10789
10790 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010791 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010792 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10793 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10794 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10795
10796 int32_t available_saturation_range[4];
10797 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10798 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10799 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10800 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10801 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10802 available_saturation_range, 4);
10803
10804 uint8_t is_hdr_values[2];
10805 is_hdr_values[0] = 0;
10806 is_hdr_values[1] = 1;
10807 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10808 is_hdr_values, 2);
10809
10810 float is_hdr_confidence_range[2];
10811 is_hdr_confidence_range[0] = 0.0;
10812 is_hdr_confidence_range[1] = 1.0;
10813 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10814 is_hdr_confidence_range, 2);
10815
Emilian Peev0a972ef2017-03-16 10:25:53 +000010816 size_t eepromLength = strnlen(
10817 reinterpret_cast<const char *>(
10818 gCamCapability[cameraId]->eeprom_version_info),
10819 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10820 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010821 char easelInfo[] = ",E:N";
10822 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10823 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10824 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010825 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010826 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010827 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010828 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010829 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10830 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10831 }
10832
Thierry Strudel3d639192016-09-09 11:52:26 -070010833 gStaticMetadata[cameraId] = staticInfo.release();
10834 return rc;
10835}
10836
10837/*===========================================================================
10838 * FUNCTION : makeTable
10839 *
10840 * DESCRIPTION: make a table of sizes
10841 *
10842 * PARAMETERS :
10843 *   @dimTable : input array of dimensions; @size : number of valid entries
10844 *   @max_size : cap on entries copied; @sizeTable : output array of width/height pairs
10845 *==========================================================================*/
10846void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10847 size_t max_size, int32_t *sizeTable)
10848{
10849 size_t j = 0;
10850 if (size > max_size) {
10851 size = max_size;
10852 }
10853 for (size_t i = 0; i < size; i++) {
10854 sizeTable[j] = dimTable[i].width;
10855 sizeTable[j+1] = dimTable[i].height;
10856 j+=2;
10857 }
10858}
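// Illustration (hypothetical input): a dimTable of {{4032, 3024}, {1920, 1080}} with
// size = 2 produces sizeTable = {4032, 3024, 1920, 1080}, i.e. width/height pairs
// flattened in table order.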
10859
10860/*===========================================================================
10861 * FUNCTION : makeFPSTable
10862 *
10863 * DESCRIPTION: make a table of fps ranges
10864 *
10865 * PARAMETERS :
10866 *   @fpsTable : input fps ranges; @size : valid entries; @max_size : cap; @fpsRangesTable : output (min_fps, max_fps) pairs
10867 *==========================================================================*/
10868void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10869 size_t max_size, int32_t *fpsRangesTable)
10870{
10871 size_t j = 0;
10872 if (size > max_size) {
10873 size = max_size;
10874 }
10875 for (size_t i = 0; i < size; i++) {
10876 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10877 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10878 j+=2;
10879 }
10880}
10881
10882/*===========================================================================
10883 * FUNCTION : makeOverridesList
10884 *
10885 * DESCRIPTION: make a list of scene mode overrides
10886 *
10887 * PARAMETERS :
10888 *   @overridesTable : per-scene-mode overrides from the daemon; @size : entries; @max_size : cap
10889 *   @overridesList : output AE/AWB/AF triplets; @supported_indexes : fwk scene mode indexes; @camera_id : camera Id
10890 *==========================================================================*/
10891void QCamera3HardwareInterface::makeOverridesList(
10892 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10893 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10894{
10895    /* The daemon gives a list of overrides for all scene modes.
10896       However, we should send the framework only the overrides for the
10897       scene modes it supports. */
10898 size_t j = 0;
10899 if (size > max_size) {
10900 size = max_size;
10901 }
10902 size_t focus_count = CAM_FOCUS_MODE_MAX;
10903 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10904 focus_count);
10905 for (size_t i = 0; i < size; i++) {
10906 bool supt = false;
10907 size_t index = supported_indexes[i];
10908 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10909 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10910 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10911 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10912 overridesTable[index].awb_mode);
10913 if (NAME_NOT_FOUND != val) {
10914 overridesList[j+1] = (uint8_t)val;
10915 }
10916 uint8_t focus_override = overridesTable[index].af_mode;
10917 for (size_t k = 0; k < focus_count; k++) {
10918 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10919 supt = true;
10920 break;
10921 }
10922 }
10923 if (supt) {
10924 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10925 focus_override);
10926 if (NAME_NOT_FOUND != val) {
10927 overridesList[j+2] = (uint8_t)val;
10928 }
10929 } else {
10930 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10931 }
10932 j+=3;
10933 }
10934}
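// Each supported scene mode contributes a 3-byte (aeMode, awbMode, afMode) group to
// overridesList, matching the per-scene-mode layout that
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects.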
10935
10936/*===========================================================================
10937 * FUNCTION : filterJpegSizes
10938 *
10939 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are
10940 *              no smaller than the active array dimensions divided by the downscale factor
10941 *
10942 * PARAMETERS : @jpegSizes : output array; @processedSizes/@processedSizesCnt : input sizes;
10943 *              @maxCount : cap on input entries; @active_array_size/@downscale_factor : filtering bounds
10944 * RETURN : length of jpegSizes array
10945 *==========================================================================*/
10946
10947size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10948 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10949 uint8_t downscale_factor)
10950{
10951 if (0 == downscale_factor) {
10952 downscale_factor = 1;
10953 }
10954
10955 int32_t min_width = active_array_size.width / downscale_factor;
10956 int32_t min_height = active_array_size.height / downscale_factor;
10957 size_t jpegSizesCnt = 0;
10958 if (processedSizesCnt > maxCount) {
10959 processedSizesCnt = maxCount;
10960 }
10961 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10962 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10963 jpegSizes[jpegSizesCnt] = processedSizes[i];
10964 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10965 jpegSizesCnt += 2;
10966 }
10967 }
10968 return jpegSizesCnt;
10969}
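// Illustration (hypothetical values): with an active array of 4032x3024 and
// downscale_factor = 2, only processed sizes with width >= 2016 and height >= 1512
// survive the filter and are copied into jpegSizes.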
10970
10971/*===========================================================================
10972 * FUNCTION : computeNoiseModelEntryS
10973 *
10974 * DESCRIPTION: function to map a given sensitivity to the S noise
10975 * model parameters in the DNG noise model.
10976 *
10977 * PARAMETERS : sens : the sensor sensitivity
10978 *
10979 * RETURN : S (sensor amplification) noise
10980 *
10981 *==========================================================================*/
10982double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10983 double s = gCamCapability[mCameraId]->gradient_S * sens +
10984 gCamCapability[mCameraId]->offset_S;
10985 return ((s < 0.0) ? 0.0 : s);
10986}
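// Worked example (hypothetical calibration values): with gradient_S = 3.74e-06 and
// offset_S = 3.73e-06, computeNoiseModelEntryS(100) returns
// 3.74e-06 * 100 + 3.73e-06 ~= 3.777e-04; negative results are clamped to 0.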
10987
10988/*===========================================================================
10989 * FUNCTION : computeNoiseModelEntryO
10990 *
10991 * DESCRIPTION: function to map a given sensitivity to the O noise
10992 * model parameters in the DNG noise model.
10993 *
10994 * PARAMETERS : sens : the sensor sensitivity
10995 *
10996 * RETURN : O (sensor readout) noise
10997 *
10998 *==========================================================================*/
10999double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11000 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11001 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11002 1.0 : (1.0 * sens / max_analog_sens);
11003 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11004 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11005 return ((o < 0.0) ? 0.0 : o);
11006}
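// Worked example (hypothetical values): with gradient_O = 4.5e-12, offset_O = 3.2e-10,
// and max_analog_sensitivity = 800, computeNoiseModelEntryO(1600) uses a digital gain
// of 1600/800 = 2, giving 4.5e-12 * 1600^2 + 3.2e-10 * 2^2 ~= 1.15e-05.
// Together the (S, O) pair parameterizes the DNG noise model, where the noise standard
// deviation at signal level x is approximately sqrt(S * x + O).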
11007
11008/*===========================================================================
11009 * FUNCTION : getSensorSensitivity
11010 *
11011 * DESCRIPTION: convert iso_mode to an integer value
11012 *
11013 * PARAMETERS : iso_mode : the iso_mode supported by sensor
11014 *
11015 * RETURN : sensitivity supported by sensor
11016 *
11017 *==========================================================================*/
11018int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11019{
11020 int32_t sensitivity;
11021
11022 switch (iso_mode) {
11023 case CAM_ISO_MODE_100:
11024 sensitivity = 100;
11025 break;
11026 case CAM_ISO_MODE_200:
11027 sensitivity = 200;
11028 break;
11029 case CAM_ISO_MODE_400:
11030 sensitivity = 400;
11031 break;
11032 case CAM_ISO_MODE_800:
11033 sensitivity = 800;
11034 break;
11035 case CAM_ISO_MODE_1600:
11036 sensitivity = 1600;
11037 break;
11038 default:
11039 sensitivity = -1;
11040 break;
11041 }
11042 return sensitivity;
11043}
11044
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011045int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011046 if (gEaselManagerClient == nullptr) {
11047 gEaselManagerClient = EaselManagerClient::create();
11048 if (gEaselManagerClient == nullptr) {
11049 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11050 return -ENODEV;
11051 }
11052 }
11053
11054 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011055 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11056 // to connect to Easel.
11057 bool doNotpowerOnEasel =
11058 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11059
11060 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011061 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11062 return OK;
11063 }
11064
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011065 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011066 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011067 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011068 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11069 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011070 return res;
11071 }
11072
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011073 EaselManagerClientOpened = true;
11074
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011075 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011076 if (res != OK) {
11077 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11078 }
11079
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011080 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011081 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011082 gEnableMultipleHdrplusOutputs =
11083 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011084
11085 // Expose enableZsl key only when HDR+ mode is enabled.
11086 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011087 }
11088
11089 return OK;
11090}
11091
Thierry Strudel3d639192016-09-09 11:52:26 -070011092/*===========================================================================
11093 * FUNCTION : getCamInfo
11094 *
11095 * DESCRIPTION: query camera capabilities
11096 *
11097 * PARAMETERS :
11098 * @cameraId : camera Id
11099 * @info : camera info struct to be filled in with camera capabilities
11100 *
11101 * RETURN : int type of status
11102 * NO_ERROR -- success
11103 *              non-zero failure code
11104 *==========================================================================*/
11105int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11106 struct camera_info *info)
11107{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011108 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011109 int rc = 0;
11110
11111 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011112
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011113 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011114 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011115 rc = initHdrPlusClientLocked();
11116 if (rc != OK) {
11117 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11118 pthread_mutex_unlock(&gCamLock);
11119 return rc;
11120 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011121 }
11122
Thierry Strudel3d639192016-09-09 11:52:26 -070011123 if (NULL == gCamCapability[cameraId]) {
11124 rc = initCapabilities(cameraId);
11125 if (rc < 0) {
11126 pthread_mutex_unlock(&gCamLock);
11127 return rc;
11128 }
11129 }
11130
11131 if (NULL == gStaticMetadata[cameraId]) {
11132 rc = initStaticMetadata(cameraId);
11133 if (rc < 0) {
11134 pthread_mutex_unlock(&gCamLock);
11135 return rc;
11136 }
11137 }
11138
11139 switch(gCamCapability[cameraId]->position) {
11140 case CAM_POSITION_BACK:
11141 case CAM_POSITION_BACK_AUX:
11142 info->facing = CAMERA_FACING_BACK;
11143 break;
11144
11145 case CAM_POSITION_FRONT:
11146 case CAM_POSITION_FRONT_AUX:
11147 info->facing = CAMERA_FACING_FRONT;
11148 break;
11149
11150 default:
11151 LOGE("Unknown position type %d for camera id:%d",
11152 gCamCapability[cameraId]->position, cameraId);
11153 rc = -1;
11154 break;
11155 }
11156
11157
11158 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011159#ifndef USE_HAL_3_3
11160 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11161#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011162 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011163#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011164 info->static_camera_characteristics = gStaticMetadata[cameraId];
11165
11166 //For now assume both cameras can operate independently.
11167 info->conflicting_devices = NULL;
11168 info->conflicting_devices_length = 0;
11169
11170 //resource cost is 100 * MIN(1.0, m/M),
11171 //where m is throughput requirement with maximum stream configuration
11172 //and M is CPP maximum throughput.
11173 float max_fps = 0.0;
11174 for (uint32_t i = 0;
11175 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11176 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11177 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11178 }
11179 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11180 gCamCapability[cameraId]->active_array_size.width *
11181 gCamCapability[cameraId]->active_array_size.height * max_fps /
11182 gCamCapability[cameraId]->max_pixel_bandwidth;
11183 info->resource_cost = 100 * MIN(1.0, ratio);
11184 LOGI("camera %d resource cost is %d", cameraId,
11185 info->resource_cost);
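    // Worked example (hypothetical numbers): with MAX_PROCESSED_STREAMS = 3, a
    // 4032x3024 active array, a 30 fps maximum and a CPP bandwidth of 1.2e9
    // pixels/s, ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, so the reported
    // resource cost would be 91.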
11186
11187 pthread_mutex_unlock(&gCamLock);
11188 return rc;
11189}
11190
11191/*===========================================================================
11192 * FUNCTION : translateCapabilityToMetadata
11193 *
11194 * DESCRIPTION: translate the capability into camera_metadata_t
11195 *
11196 * PARAMETERS : type of the request
11197 *
11198 *
11199 * RETURN : success: camera_metadata_t*
11200 * failure: NULL
11201 *
11202 *==========================================================================*/
11203camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11204{
11205 if (mDefaultMetadata[type] != NULL) {
11206 return mDefaultMetadata[type];
11207 }
11208 //first time we are handling this request
11209 //fill up the metadata structure using the wrapper class
11210 CameraMetadata settings;
11211 //translate from cam_capability_t to camera_metadata_tag_t
11212 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11213 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11214 int32_t defaultRequestID = 0;
11215 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11216
11217 /* OIS disable */
11218 char ois_prop[PROPERTY_VALUE_MAX];
11219 memset(ois_prop, 0, sizeof(ois_prop));
11220 property_get("persist.camera.ois.disable", ois_prop, "0");
11221 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11222
11223 /* Force video to use OIS */
11224 char videoOisProp[PROPERTY_VALUE_MAX];
11225 memset(videoOisProp, 0, sizeof(videoOisProp));
11226 property_get("persist.camera.ois.video", videoOisProp, "1");
11227 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011228
11229 // Hybrid AE enable/disable
11230 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11231 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11232 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011233 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011234
Thierry Strudel3d639192016-09-09 11:52:26 -070011235 uint8_t controlIntent = 0;
11236 uint8_t focusMode;
11237 uint8_t vsMode;
11238 uint8_t optStabMode;
11239 uint8_t cacMode;
11240 uint8_t edge_mode;
11241 uint8_t noise_red_mode;
11242 uint8_t tonemap_mode;
11243 bool highQualityModeEntryAvailable = FALSE;
11244 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011245 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011246 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11247 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011248 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011249 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011250 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011251
Thierry Strudel3d639192016-09-09 11:52:26 -070011252 switch (type) {
11253 case CAMERA3_TEMPLATE_PREVIEW:
11254 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11255 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11256 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11257 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11258 edge_mode = ANDROID_EDGE_MODE_FAST;
11259 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11260 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11261 break;
11262 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11263 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11264 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11265 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11266 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11267 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11268 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11269 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11270 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11271 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11272 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11273 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11274 highQualityModeEntryAvailable = TRUE;
11275 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11276 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11277 fastModeEntryAvailable = TRUE;
11278 }
11279 }
11280 if (highQualityModeEntryAvailable) {
11281 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11282 } else if (fastModeEntryAvailable) {
11283 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11284 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011285 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11286 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11287 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011288 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011289 break;
11290 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11291 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11292 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11293 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011294 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11295 edge_mode = ANDROID_EDGE_MODE_FAST;
11296 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11297 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11298 if (forceVideoOis)
11299 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11300 break;
11301 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11302 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11303 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11304 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011305 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11306 edge_mode = ANDROID_EDGE_MODE_FAST;
11307 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11308 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11309 if (forceVideoOis)
11310 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11311 break;
11312 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11313 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11314 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11315 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11316 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11317 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11318 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11319 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11320 break;
11321 case CAMERA3_TEMPLATE_MANUAL:
11322 edge_mode = ANDROID_EDGE_MODE_FAST;
11323 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11324 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11325 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11326 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11327 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11328 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11329 break;
11330 default:
11331 edge_mode = ANDROID_EDGE_MODE_FAST;
11332 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11333 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11334 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11335 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11336 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11337 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11338 break;
11339 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011340    // Set CAC to OFF if the underlying device doesn't support it
11341 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11342 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11343 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011344 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11345 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11346 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11347 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11348 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11349 }
11350 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011351 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011352 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011353
11354 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11355 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11356 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11357 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11358 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11359 || ois_disable)
11360 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11361 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011362 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011363
11364 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11365 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11366
11367 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11368 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11369
11370 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11371 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11372
11373 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11374 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11375
11376 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11377 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11378
11379 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11380 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11381
11382 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11383 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11384
11385 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11386 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11387
11388 /*flash*/
11389 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11390 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11391
11392 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11393 settings.update(ANDROID_FLASH_FIRING_POWER,
11394 &flashFiringLevel, 1);
11395
11396 /* lens */
11397 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11398 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11399
11400 if (gCamCapability[mCameraId]->filter_densities_count) {
11401 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11402        // ANDROID_LENS_FILTER_DENSITY takes a single value in a request; set only the default here
11403        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 1);
11404 }
11405
11406 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11407 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11408
Thierry Strudel3d639192016-09-09 11:52:26 -070011409 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11410 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11411
11412 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11413 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11414
11415 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11416 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11417
11418 /* face detection (default to OFF) */
11419 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11420 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11421
Thierry Strudel54dc9782017-02-15 12:12:10 -080011422 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11423 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011424
11425 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11426 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11427
11428 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11429 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11430
Thierry Strudel3d639192016-09-09 11:52:26 -070011431
11432 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11433 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11434
11435 /* Exposure time (default to the minimum supported exposure time) */
11436 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11437 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11438
11439 /* frame duration */
11440 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11441 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11442
11443 /* sensitivity */
11444 static const int32_t default_sensitivity = 100;
11445 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011446#ifndef USE_HAL_3_3
11447 static const int32_t default_isp_sensitivity =
11448 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11449 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11450#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011451
11452 /*edge mode*/
11453 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11454
11455 /*noise reduction mode*/
11456 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11457
11458 /*color correction mode*/
11459 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11460 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11461
11462 /*tonemap mode*/
11463 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11464
11465 int32_t scaler_crop_region[4];
11466 scaler_crop_region[0] = 0;
11467 scaler_crop_region[1] = 0;
11468 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11469 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11470 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11471
11472 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11473 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11474
11475 /*focus distance*/
11476 float focus_distance = 0.0;
11477 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11478
11479 /*target fps range: use the widest range for preview/still templates and the highest fixed-fps range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011480 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011481 float max_range = 0.0;
11482 float max_fixed_fps = 0.0;
11483 int32_t fps_range[2] = {0, 0};
11484 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11485 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011486 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11487 TEMPLATE_MAX_PREVIEW_FPS) {
11488 continue;
11489 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011490 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11491 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11492 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11493 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11494 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11495 if (range > max_range) {
11496 fps_range[0] =
11497 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11498 fps_range[1] =
11499 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11500 max_range = range;
11501 }
11502 } else {
11503 if (range < 0.01 && max_fixed_fps <
11504 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11505 fps_range[0] =
11506 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11507 fps_range[1] =
11508 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11509 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11510 }
11511 }
11512 }
11513 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
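    // Editor's illustrative trace (not part of the original HAL code; assumes
    // TEMPLATE_MAX_PREVIEW_FPS is 30): with a hypothetical fps table of
    // {[15,30], [30,30], [7.5,30], [60,120]}, the [60,120] entry is skipped,
    // PREVIEW/STILL_CAPTURE/ZSL templates pick the widest remaining range [7.5,30]
    // (stored as {7, 30} after the int32_t cast), and all other templates,
    // e.g. VIDEO_RECORD, pick the highest fixed-rate entry {30, 30}.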
11514
11515 /*precapture trigger*/
11516 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11517 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11518
11519 /*af trigger*/
11520 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11521 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11522
11523 /* ae & af regions */
11524 int32_t active_region[] = {
11525 gCamCapability[mCameraId]->active_array_size.left,
11526 gCamCapability[mCameraId]->active_array_size.top,
11527 gCamCapability[mCameraId]->active_array_size.left +
11528 gCamCapability[mCameraId]->active_array_size.width,
11529 gCamCapability[mCameraId]->active_array_size.top +
11530 gCamCapability[mCameraId]->active_array_size.height,
11531 0};
11532 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11533 sizeof(active_region) / sizeof(active_region[0]));
11534 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11535 sizeof(active_region) / sizeof(active_region[0]));
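    // Editor's note (illustrative; e.g. for a 4032x3024 active array the region above
    // is {0, 0, 4032, 3024, 0}): the five values are {xmin, ymin, xmax, ymax, weight},
    // and with weight 0 the metering region is effectively unset until the app
    // supplies one in a capture request.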
11536
11537 /* black level lock */
11538 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11539 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11540
Thierry Strudel3d639192016-09-09 11:52:26 -070011541 //special defaults for manual template
11542 if (type == CAMERA3_TEMPLATE_MANUAL) {
11543 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11544 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11545
11546 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11547 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11548
11549 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11550 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11551
11552 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11553 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11554
11555 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11556 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11557
11558 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11559 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11560 }
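    // Editor's note (illustrative, not HAL behavior): because the MANUAL template
    // forces CONTROL_MODE and AE/AF/AWB to OFF above, an app using it is expected to
    // supply ANDROID_SENSOR_EXPOSURE_TIME, ANDROID_SENSOR_SENSITIVITY and
    // ANDROID_SENSOR_FRAME_DURATION in every request (plus the matching transform/
    // curve values if it changes the color-correction or tonemap modes), since 3A
    // will no longer fill them in.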
11561
11562
11563 /* TNR
11564 * This is where we decide for which templates TNR will be set.
11565 * TNR is enabled if either the preview or the video stream requires it.
11566 * This is not to be confused with per-stream linking; that decision is
11567 * still made per session and is handled as part of stream configuration.
11568 */
11569 uint8_t tnr_enable = 0;
11570
11571 if (m_bTnrPreview || m_bTnrVideo) {
11572
11573 switch (type) {
11574 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11575 tnr_enable = 1;
11576 break;
11577
11578 default:
11579 tnr_enable = 0;
11580 break;
11581 }
11582
11583 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11584 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11585 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11586
11587 LOGD("TNR:%d with process plate %d for template:%d",
11588 tnr_enable, tnr_process_type, type);
11589 }
11590
11591 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011592 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011593 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11594
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011595 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011596 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11597
Shuzhen Wang920ea402017-05-03 08:49:39 -070011598 uint8_t related_camera_id = mCameraId;
11599 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011600
11601 /* CDS default */
11602 char prop[PROPERTY_VALUE_MAX];
11603 memset(prop, 0, sizeof(prop));
11604 property_get("persist.camera.CDS", prop, "Auto");
11605 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11606 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11607 if (CAM_CDS_MODE_MAX == cds_mode) {
11608 cds_mode = CAM_CDS_MODE_AUTO;
11609 }
11610
11611 /* Disable CDS in templates that have TNR enabled */
11612 if (tnr_enable)
11613 cds_mode = CAM_CDS_MODE_OFF;
11614
11615 int32_t mode = cds_mode;
11616 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
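    // Editor's example (illustrative; assumes CDS_MAP recognizes strings such as
    // "On", "Off" and "Auto"): e.g.
    //   adb shell setprop persist.camera.CDS Off
    // would make the lookup above yield CAM_CDS_MODE_OFF for every template, while an
    // unrecognized value falls back to CAM_CDS_MODE_AUTO; templates with TNR enabled
    // force CAM_CDS_MODE_OFF regardless of the property.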
Thierry Strudel04e026f2016-10-10 11:27:36 -070011617
Thierry Strudel269c81a2016-10-12 12:13:59 -070011618 /* Manual Convergence AEC Speed is disabled by default*/
11619 float default_aec_speed = 0;
11620 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11621
11622 /* Manual Convergence AWB Speed is disabled by default*/
11623 float default_awb_speed = 0;
11624 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11625
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011626 // Set instant AEC to normal convergence by default
11627 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11628 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11629
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011630 if (gExposeEnableZslKey) {
11631 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011632 int32_t postview = 0;
11633 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011634 int32_t continuousZslCapture = 0;
11635 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011636 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11637 // CAMERA3_TEMPLATE_PREVIEW.
11638 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11639 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011640 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11641
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011642 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11643 // hybrid ae is enabled for 3rd party app HDR+.
11644 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11645 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11646 hybrid_ae = 1;
11647 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011648 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011649 /* hybrid ae */
11650 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011651
Thierry Strudel3d639192016-09-09 11:52:26 -070011652 mDefaultMetadata[type] = settings.release();
11653
11654 return mDefaultMetadata[type];
11655}
11656
11657/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011658 * FUNCTION : getExpectedFrameDuration
11659 *
11660 * DESCRIPTION: Extract the maximum expected frame duration from either the
11661 * exposure time or the frame duration
11662 *
11663 * PARAMETERS :
11664 * @request : request settings
11665 * @frameDuration : The maximum frame duration in nanoseconds
11666 *
11667 * RETURN : None
11668 *==========================================================================*/
11669void QCamera3HardwareInterface::getExpectedFrameDuration(
11670 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11671 if (nullptr == frameDuration) {
11672 return;
11673 }
11674
11675 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11676 find_camera_metadata_ro_entry(request,
11677 ANDROID_SENSOR_EXPOSURE_TIME,
11678 &e);
11679 if (e.count > 0) {
11680 *frameDuration = e.data.i64[0];
11681 }
11682 find_camera_metadata_ro_entry(request,
11683 ANDROID_SENSOR_FRAME_DURATION,
11684 &e);
11685 if (e.count > 0) {
11686 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11687 }
11688}
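// Editor's worked example (not part of the original HAL code): for a request with
// ANDROID_SENSOR_EXPOSURE_TIME = 50000000 (50 ms) and
// ANDROID_SENSOR_FRAME_DURATION = 33333333 (~33 ms), the function above sets
// *frameDuration to 50 ms, i.e. the larger of the two values.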
11689
11690/*===========================================================================
11691 * FUNCTION : calculateMaxExpectedDuration
11692 *
11693 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11694 * current camera settings.
11695 *
11696 * PARAMETERS :
11697 * @request : request settings
11698 *
11699 * RETURN : Expected frame duration in nanoseconds.
11700 *==========================================================================*/
11701nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11702 const camera_metadata_t *request) {
11703 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11704 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11705 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11706 if (e.count == 0) {
11707 return maxExpectedDuration;
11708 }
11709
11710 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11711 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11712 }
11713
11714 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11715 return maxExpectedDuration;
11716 }
11717
11718 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11719 if (e.count == 0) {
11720 return maxExpectedDuration;
11721 }
11722
11723 switch (e.data.u8[0]) {
11724 case ANDROID_CONTROL_AE_MODE_OFF:
11725 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11726 break;
11727 default:
11728 find_camera_metadata_ro_entry(request,
11729 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11730 &e);
11731 if (e.count > 1) {
11732 maxExpectedDuration = 1e9 / e.data.i32[0]; // AE target fps range is int32; use min fps
11733 }
11734 break;
11735 }
11736
11737 return maxExpectedDuration;
11738}
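// Editor's worked example (not part of the original HAL code): with
// ANDROID_CONTROL_MODE = AUTO, ANDROID_CONTROL_AE_MODE = ON and
// ANDROID_CONTROL_AE_TARGET_FPS_RANGE = [15, 30], the function above returns
// 1e9 / 15, i.e. ~66.7 ms; with AE_MODE = OFF it instead falls back to
// getExpectedFrameDuration(), i.e. max(exposure time, frame duration), and when the
// relevant tags are absent it returns kDefaultExpectedDuration unchanged.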
11739
11740/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011741 * FUNCTION : setFrameParameters
11742 *
11743 * DESCRIPTION: set parameters per frame as requested in the metadata from
11744 * framework
11745 *
11746 * PARAMETERS :
11747 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011748 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011749 * @blob_request: Whether this request is a blob request or not
11750 *
11751 * RETURN : success: NO_ERROR
11752 * failure:
11753 *==========================================================================*/
11754int QCamera3HardwareInterface::setFrameParameters(
11755 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011756 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011757 int blob_request,
11758 uint32_t snapshotStreamId)
11759{
11760 /*translate from camera_metadata_t type to parm_type_t*/
11761 int rc = 0;
11762 int32_t hal_version = CAM_HAL_V3;
11763
11764 clear_metadata_buffer(mParameters);
11765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11766 LOGE("Failed to set hal version in the parameters");
11767 return BAD_VALUE;
11768 }
11769
11770 /*we need to update the frame number in the parameters*/
11771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11772 request->frame_number)) {
11773 LOGE("Failed to set the frame number in the parameters");
11774 return BAD_VALUE;
11775 }
11776
11777 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011778 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011779 LOGE("Failed to set stream type mask in the parameters");
11780 return BAD_VALUE;
11781 }
11782
11783 if (mUpdateDebugLevel) {
11784 uint32_t dummyDebugLevel = 0;
11785 /* The value of dummyDebugLevel is irrelevant. On
11786 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11788 dummyDebugLevel)) {
11789 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11790 return BAD_VALUE;
11791 }
11792 mUpdateDebugLevel = false;
11793 }
11794
11795 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011796 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011797 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11798 if (blob_request)
11799 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11800 }
11801
11802 return rc;
11803}
11804
11805/*===========================================================================
11806 * FUNCTION : setReprocParameters
11807 *
11808 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11809 * return it.
11810 *
11811 * PARAMETERS :
11812 * @request : request that needs to be serviced
11813 *
11814 * RETURN : success: NO_ERROR
11815 * failure:
11816 *==========================================================================*/
11817int32_t QCamera3HardwareInterface::setReprocParameters(
11818 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11819 uint32_t snapshotStreamId)
11820{
11821 /*translate from camera_metadata_t type to parm_type_t*/
11822 int rc = 0;
11823
11824 if (NULL == request->settings){
11825 LOGE("Reprocess settings cannot be NULL");
11826 return BAD_VALUE;
11827 }
11828
11829 if (NULL == reprocParam) {
11830 LOGE("Invalid reprocessing metadata buffer");
11831 return BAD_VALUE;
11832 }
11833 clear_metadata_buffer(reprocParam);
11834
11835 /*we need to update the frame number in the parameters*/
11836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11837 request->frame_number)) {
11838 LOGE("Failed to set the frame number in the parameters");
11839 return BAD_VALUE;
11840 }
11841
11842 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11843 if (rc < 0) {
11844 LOGE("Failed to translate reproc request");
11845 return rc;
11846 }
11847
11848 CameraMetadata frame_settings;
11849 frame_settings = request->settings;
11850 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11851 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11852 int32_t *crop_count =
11853 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11854 int32_t *crop_data =
11855 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11856 int32_t *roi_map =
11857 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11858 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11859 cam_crop_data_t crop_meta;
11860 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11861 crop_meta.num_of_streams = 1;
11862 crop_meta.crop_info[0].crop.left = crop_data[0];
11863 crop_meta.crop_info[0].crop.top = crop_data[1];
11864 crop_meta.crop_info[0].crop.width = crop_data[2];
11865 crop_meta.crop_info[0].crop.height = crop_data[3];
11866
11867 crop_meta.crop_info[0].roi_map.left =
11868 roi_map[0];
11869 crop_meta.crop_info[0].roi_map.top =
11870 roi_map[1];
11871 crop_meta.crop_info[0].roi_map.width =
11872 roi_map[2];
11873 crop_meta.crop_info[0].roi_map.height =
11874 roi_map[3];
11875
11876 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11877 rc = BAD_VALUE;
11878 }
11879 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11880 request->input_buffer->stream,
11881 crop_meta.crop_info[0].crop.left,
11882 crop_meta.crop_info[0].crop.top,
11883 crop_meta.crop_info[0].crop.width,
11884 crop_meta.crop_info[0].crop.height);
11885 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11886 request->input_buffer->stream,
11887 crop_meta.crop_info[0].roi_map.left,
11888 crop_meta.crop_info[0].roi_map.top,
11889 crop_meta.crop_info[0].roi_map.width,
11890 crop_meta.crop_info[0].roi_map.height);
11891 } else {
11892 LOGE("Invalid reprocess crop count %d!", *crop_count);
11893 }
11894 } else {
11895 LOGE("No crop data from matching output stream");
11896 }
11897
11898 /* These settings are not needed for regular requests so handle them specially for
11899 reprocess requests; information needed for EXIF tags */
11900 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11901 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11902 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11903 if (NAME_NOT_FOUND != val) {
11904 uint32_t flashMode = (uint32_t)val;
11905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11906 rc = BAD_VALUE;
11907 }
11908 } else {
11909 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11910 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11911 }
11912 } else {
11913 LOGH("No flash mode in reprocess settings");
11914 }
11915
11916 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11917 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11919 rc = BAD_VALUE;
11920 }
11921 } else {
11922 LOGH("No flash state in reprocess settings");
11923 }
11924
11925 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11926 uint8_t *reprocessFlags =
11927 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11929 *reprocessFlags)) {
11930 rc = BAD_VALUE;
11931 }
11932 }
11933
Thierry Strudel54dc9782017-02-15 12:12:10 -080011934 // Add exif debug data to internal metadata
11935 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11936 mm_jpeg_debug_exif_params_t *debug_params =
11937 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11938 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11939 // AE
11940 if (debug_params->ae_debug_params_valid == TRUE) {
11941 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11942 debug_params->ae_debug_params);
11943 }
11944 // AWB
11945 if (debug_params->awb_debug_params_valid == TRUE) {
11946 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11947 debug_params->awb_debug_params);
11948 }
11949 // AF
11950 if (debug_params->af_debug_params_valid == TRUE) {
11951 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11952 debug_params->af_debug_params);
11953 }
11954 // ASD
11955 if (debug_params->asd_debug_params_valid == TRUE) {
11956 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11957 debug_params->asd_debug_params);
11958 }
11959 // Stats
11960 if (debug_params->stats_debug_params_valid == TRUE) {
11961 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11962 debug_params->stats_debug_params);
11963 }
11964 // BE Stats
11965 if (debug_params->bestats_debug_params_valid == TRUE) {
11966 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11967 debug_params->bestats_debug_params);
11968 }
11969 // BHIST
11970 if (debug_params->bhist_debug_params_valid == TRUE) {
11971 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11972 debug_params->bhist_debug_params);
11973 }
11974 // 3A Tuning
11975 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11976 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11977 debug_params->q3a_tuning_debug_params);
11978 }
11979 }
11980
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011981 // Add metadata which reprocess needs
11982 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11983 cam_reprocess_info_t *repro_info =
11984 (cam_reprocess_info_t *)frame_settings.find
11985 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011986 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011987 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011988 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011989 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011990 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011991 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011992 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011993 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011994 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011995 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011996 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011997 repro_info->pipeline_flip);
11998 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11999 repro_info->af_roi);
12000 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12001 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070012002 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
12003 CAM_INTF_PARM_ROTATION metadata has already been added in
12004 translateToHalMetadata, and HAL needs to keep this new rotation
12005 metadata. Otherwise, the old rotation info saved in the vendor tag
12006 is used */
12007 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12008 CAM_INTF_PARM_ROTATION, reprocParam) {
12009 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12010 } else {
12011 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012012 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012013 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012014 }
12015
12016 /* Add additional JPEG cropping information. The app sets QCAMERA3_JPEG_ENCODE_CROP_RECT
12017 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
12018 roi.width and roi.height give the final JPEG size.
12019 For now, HAL only checks this for reprocess requests */
12020 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12021 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12022 uint8_t *enable =
12023 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12024 if (*enable == TRUE) {
12025 int32_t *crop_data =
12026 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12027 cam_stream_crop_info_t crop_meta;
12028 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12029 crop_meta.stream_id = 0;
12030 crop_meta.crop.left = crop_data[0];
12031 crop_meta.crop.top = crop_data[1];
12032 crop_meta.crop.width = crop_data[2];
12033 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012034 // The JPEG crop roi should match cpp output size
12035 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12036 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12037 crop_meta.roi_map.left = 0;
12038 crop_meta.roi_map.top = 0;
12039 crop_meta.roi_map.width = cpp_crop->crop.width;
12040 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012041 }
12042 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12043 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012044 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012045 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012046 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12047 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012048 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012049 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12050
12051 // Add JPEG scale information
12052 cam_dimension_t scale_dim;
12053 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12054 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12055 int32_t *roi =
12056 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12057 scale_dim.width = roi[2];
12058 scale_dim.height = roi[3];
12059 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12060 scale_dim);
12061 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12062 scale_dim.width, scale_dim.height, mCameraId);
12063 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012064 }
12065 }
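    // Editor's worked example (illustrative values only): an app wanting a centered
    // ~2 MP JPEG from a 4000x3000 reprocess input could set
    //   QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1
    //   QCAMERA3_JPEG_ENCODE_CROP_RECT   = {500, 375, 3000, 2250}
    //   QCAMERA3_JPEG_ENCODE_CROP_ROI    = {0, 0, 1600, 1200}
    // so the HW encoder crops to 3000x2250 and scales the result to a final
    // 1600x1200 JPEG, as handled above.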
12066
12067 return rc;
12068}
12069
12070/*===========================================================================
12071 * FUNCTION : saveRequestSettings
12072 *
12073 * DESCRIPTION: Add any settings that might have changed to the request settings
12074 * and save the settings to be applied on the frame
12075 *
12076 * PARAMETERS :
12077 * @jpegMetadata : the extracted and/or modified jpeg metadata
12078 * @request : request with initial settings
12079 *
12080 * RETURN :
12081 * camera_metadata_t* : pointer to the saved request settings
12082 *==========================================================================*/
12083camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12084 const CameraMetadata &jpegMetadata,
12085 camera3_capture_request_t *request)
12086{
12087 camera_metadata_t *resultMetadata;
12088 CameraMetadata camMetadata;
12089 camMetadata = request->settings;
12090
12091 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12092 int32_t thumbnail_size[2];
12093 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12094 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12095 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12096 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12097 }
12098
12099 if (request->input_buffer != NULL) {
12100 uint8_t reprocessFlags = 1;
12101 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12102 (uint8_t*)&reprocessFlags,
12103 sizeof(reprocessFlags));
12104 }
12105
12106 resultMetadata = camMetadata.release();
12107 return resultMetadata;
12108}
12109
12110/*===========================================================================
12111 * FUNCTION : setHalFpsRange
12112 *
12113 * DESCRIPTION: set FPS range parameter
12114 *
12115 *
12116 * PARAMETERS :
12117 * @settings : Metadata from framework
12118 * @hal_metadata: Metadata buffer
12119 *
12120 *
12121 * RETURN : success: NO_ERROR
12122 * failure:
12123 *==========================================================================*/
12124int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12125 metadata_buffer_t *hal_metadata)
12126{
12127 int32_t rc = NO_ERROR;
12128 cam_fps_range_t fps_range;
12129 fps_range.min_fps = (float)
12130 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12131 fps_range.max_fps = (float)
12132 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12133 fps_range.video_min_fps = fps_range.min_fps;
12134 fps_range.video_max_fps = fps_range.max_fps;
12135
12136 LOGD("aeTargetFpsRange fps: [%f %f]",
12137 fps_range.min_fps, fps_range.max_fps);
12138 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12139 * follows:
12140 * ---------------------------------------------------------------|
12141 * Video stream is absent in configure_streams |
12142 * (Camcorder preview before the first video record) |
12143 * ---------------------------------------------------------------|
12144 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12145 * | | | vid_min/max_fps|
12146 * ---------------------------------------------------------------|
12147 * NO | [ 30, 240] | 240 | [240, 240] |
12148 * |-------------|-------------|----------------|
12149 * | [240, 240] | 240 | [240, 240] |
12150 * ---------------------------------------------------------------|
12151 * Video stream is present in configure_streams |
12152 * ---------------------------------------------------------------|
12153 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12154 * | | | vid_min/max_fps|
12155 * ---------------------------------------------------------------|
12156 * NO | [ 30, 240] | 240 | [240, 240] |
12157 * (camcorder prev |-------------|-------------|----------------|
12158 * after video rec | [240, 240] | 240 | [240, 240] |
12159 * is stopped) | | | |
12160 * ---------------------------------------------------------------|
12161 * YES | [ 30, 240] | 240 | [240, 240] |
12162 * |-------------|-------------|----------------|
12163 * | [240, 240] | 240 | [240, 240] |
12164 * ---------------------------------------------------------------|
12165 * When Video stream is absent in configure_streams,
12166 * preview fps = sensor_fps / batchsize
12167 * Eg: for 240fps at batchSize 4, preview = 60fps
12168 * for 120fps at batchSize 4, preview = 30fps
12169 *
12170 * When video stream is present in configure_streams, preview fps is as per
12171 * the ratio of preview buffers to video buffers requested in process
12172 * capture request
12173 */
12174 mBatchSize = 0;
12175 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12176 fps_range.min_fps = fps_range.video_max_fps;
12177 fps_range.video_min_fps = fps_range.video_max_fps;
12178 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12179 fps_range.max_fps);
12180 if (NAME_NOT_FOUND != val) {
12181 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12183 return BAD_VALUE;
12184 }
12185
12186 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12187 /* If batchmode is currently in progress and the fps changes,
12188 * set the flag to restart the sensor */
12189 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12190 (mHFRVideoFps != fps_range.max_fps)) {
12191 mNeedSensorRestart = true;
12192 }
12193 mHFRVideoFps = fps_range.max_fps;
12194 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12195 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12196 mBatchSize = MAX_HFR_BATCH_SIZE;
12197 }
12198 }
12199 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12200
12201 }
12202 } else {
12203 /* HFR mode is a session parameter in the backend/ISP. It should be reset when
12204 * not in HFR mode */
12205 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12207 return BAD_VALUE;
12208 }
12209 }
12210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12211 return BAD_VALUE;
12212 }
12213 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12214 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12215 return rc;
12216}
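// Editor's worked example (not part of the original HAL code; assumes
// PREVIEW_FPS_FOR_HFR is 30): in CONSTRAINED_HIGH_SPEED mode a request with
// aeTargetFpsRange = [240, 240] is mapped above to a sensor/video fps range of
// [240, 240], mHFRVideoFps = 240 and mBatchSize = 240 / 30 = 8 (clamped to
// MAX_HFR_BATCH_SIZE), which per the table above gives roughly
// 240 / 8 = 30 fps preview while recording at 240 fps.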
12217
12218/*===========================================================================
12219 * FUNCTION : translateToHalMetadata
12220 *
12221 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12222 *
12223 *
12224 * PARAMETERS :
12225 * @request : request sent from framework
12226 *
12227 *
12228 * RETURN : success: NO_ERROR
12229 * failure:
12230 *==========================================================================*/
12231int QCamera3HardwareInterface::translateToHalMetadata
12232 (const camera3_capture_request_t *request,
12233 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012234 uint32_t snapshotStreamId) {
12235 if (request == nullptr || hal_metadata == nullptr) {
12236 return BAD_VALUE;
12237 }
12238
12239 int64_t minFrameDuration = getMinFrameDuration(request);
12240
12241 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12242 minFrameDuration);
12243}
12244
12245int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12246 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12247 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12248
Thierry Strudel3d639192016-09-09 11:52:26 -070012249 int rc = 0;
12250 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012251 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012252
12253 /* Do not change the order of the following list unless you know what you are
12254 * doing.
12255 * The order is laid out in such a way that parameters in the front of the table
12256 * may be used to override the parameters later in the table. Examples are:
12257 * 1. META_MODE should precede AEC/AWB/AF MODE
12258 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12259 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12260 * 4. Any mode should precede its corresponding settings
12261 */
12262 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12263 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12265 rc = BAD_VALUE;
12266 }
12267 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12268 if (rc != NO_ERROR) {
12269 LOGE("extractSceneMode failed");
12270 }
12271 }
12272
12273 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12274 uint8_t fwk_aeMode =
12275 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12276 uint8_t aeMode;
12277 int32_t redeye;
12278
12279 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12280 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012281 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12282 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012283 } else {
12284 aeMode = CAM_AE_MODE_ON;
12285 }
12286 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12287 redeye = 1;
12288 } else {
12289 redeye = 0;
12290 }
12291
12292 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12293 fwk_aeMode);
12294 if (NAME_NOT_FOUND != val) {
12295 int32_t flashMode = (int32_t)val;
12296 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12297 }
12298
12299 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12301 rc = BAD_VALUE;
12302 }
12303 }
12304
12305 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12306 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12307 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12308 fwk_whiteLevel);
12309 if (NAME_NOT_FOUND != val) {
12310 uint8_t whiteLevel = (uint8_t)val;
12311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12312 rc = BAD_VALUE;
12313 }
12314 }
12315 }
12316
12317 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12318 uint8_t fwk_cacMode =
12319 frame_settings.find(
12320 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12321 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12322 fwk_cacMode);
12323 if (NAME_NOT_FOUND != val) {
12324 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12325 bool entryAvailable = FALSE;
12326 // Check whether Frameworks set CAC mode is supported in device or not
12327 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12328 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12329 entryAvailable = TRUE;
12330 break;
12331 }
12332 }
12333 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12334 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
12335 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12336 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12337 if (entryAvailable == FALSE) {
12338 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12339 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12340 } else {
12341 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12342 // High is not supported, so set FAST since the spec says the underlying
12343 // device implementation can be the same for both modes.
12344 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12345 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12346 // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
12347 // to avoid the fps drop caused by high-quality processing
12348 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12349 } else {
12350 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12351 }
12352 }
12353 }
12354 LOGD("Final cacMode is %d", cacMode);
12355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12356 rc = BAD_VALUE;
12357 }
12358 } else {
12359 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12360 }
12361 }
12362
Jason Lee84ae9972017-02-24 13:24:24 -080012363 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012364 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012365 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012366 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012367 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12368 fwk_focusMode);
12369 if (NAME_NOT_FOUND != val) {
12370 uint8_t focusMode = (uint8_t)val;
12371 LOGD("set focus mode %d", focusMode);
12372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12373 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12374 rc = BAD_VALUE;
12375 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012376 }
12377 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012378 } else {
12379 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12380 LOGE("Focus forced to infinity %d", focusMode);
12381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12382 rc = BAD_VALUE;
12383 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012384 }
12385
Jason Lee84ae9972017-02-24 13:24:24 -080012386 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12387 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012388 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12390 focalDistance)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
12395 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12396 uint8_t fwk_antibandingMode =
12397 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12398 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12399 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12400 if (NAME_NOT_FOUND != val) {
12401 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012402 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12403 if (m60HzZone) {
12404 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12405 } else {
12406 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12407 }
12408 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12410 hal_antibandingMode)) {
12411 rc = BAD_VALUE;
12412 }
12413 }
12414 }
12415
12416 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12417 int32_t expCompensation = frame_settings.find(
12418 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12419 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12420 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12421 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12422 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012423 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012424 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12425 expCompensation)) {
12426 rc = BAD_VALUE;
12427 }
12428 }
12429
12430 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12431 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12433 rc = BAD_VALUE;
12434 }
12435 }
12436 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12437 rc = setHalFpsRange(frame_settings, hal_metadata);
12438 if (rc != NO_ERROR) {
12439 LOGE("setHalFpsRange failed");
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12444 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12446 rc = BAD_VALUE;
12447 }
12448 }
12449
12450 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12451 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12452 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12453 fwk_effectMode);
12454 if (NAME_NOT_FOUND != val) {
12455 uint8_t effectMode = (uint8_t)val;
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12457 rc = BAD_VALUE;
12458 }
12459 }
12460 }
12461
12462 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12463 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12465 colorCorrectMode)) {
12466 rc = BAD_VALUE;
12467 }
12468 }
12469
12470 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12471 cam_color_correct_gains_t colorCorrectGains;
12472 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12473 colorCorrectGains.gains[i] =
12474 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12475 }
12476 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12477 colorCorrectGains)) {
12478 rc = BAD_VALUE;
12479 }
12480 }
12481
12482 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12483 cam_color_correct_matrix_t colorCorrectTransform;
12484 cam_rational_type_t transform_elem;
12485 size_t num = 0;
12486 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12487 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12488 transform_elem.numerator =
12489 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12490 transform_elem.denominator =
12491 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12492 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12493 num++;
12494 }
12495 }
12496 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12497 colorCorrectTransform)) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501
12502 cam_trigger_t aecTrigger;
12503 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12504 aecTrigger.trigger_id = -1;
12505 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12506 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12507 aecTrigger.trigger =
12508 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12509 aecTrigger.trigger_id =
12510 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12512 aecTrigger)) {
12513 rc = BAD_VALUE;
12514 }
12515 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12516 aecTrigger.trigger, aecTrigger.trigger_id);
12517 }
12518
12519 /*af_trigger must come with a trigger id*/
12520 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12521 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12522 cam_trigger_t af_trigger;
12523 af_trigger.trigger =
12524 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12525 af_trigger.trigger_id =
12526 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12528 rc = BAD_VALUE;
12529 }
12530 LOGD("AfTrigger: %d AfTriggerID: %d",
12531 af_trigger.trigger, af_trigger.trigger_id);
12532 }
12533
12534 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12535 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12536 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12537 rc = BAD_VALUE;
12538 }
12539 }
12540 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12541 cam_edge_application_t edge_application;
12542 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012543
Thierry Strudel3d639192016-09-09 11:52:26 -070012544 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12545 edge_application.sharpness = 0;
12546 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012547 edge_application.sharpness =
12548 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12549 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12550 int32_t sharpness =
12551 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12552 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12553 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12554 LOGD("Setting edge mode sharpness %d", sharpness);
12555 edge_application.sharpness = sharpness;
12556 }
12557 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012558 }
12559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
12564 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12565 int32_t respectFlashMode = 1;
12566 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12567 uint8_t fwk_aeMode =
12568 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012569 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12570 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12571 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012572 respectFlashMode = 0;
12573 LOGH("AE Mode controls flash, ignore android.flash.mode");
12574 }
12575 }
12576 if (respectFlashMode) {
12577 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12578 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12579 LOGH("flash mode after mapping %d", val);
12580 // To check: CAM_INTF_META_FLASH_MODE usage
12581 if (NAME_NOT_FOUND != val) {
12582 uint8_t flashMode = (uint8_t)val;
12583 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12584 rc = BAD_VALUE;
12585 }
12586 }
12587 }
12588 }
12589
12590 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12591 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12593 rc = BAD_VALUE;
12594 }
12595 }
12596
12597 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12598 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12600 flashFiringTime)) {
12601 rc = BAD_VALUE;
12602 }
12603 }
12604
12605 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12606 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12608 hotPixelMode)) {
12609 rc = BAD_VALUE;
12610 }
12611 }
12612
12613 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12614 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12615 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12616 lensAperture)) {
12617 rc = BAD_VALUE;
12618 }
12619 }
12620
12621 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12622 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12624 filterDensity)) {
12625 rc = BAD_VALUE;
12626 }
12627 }
12628
12629 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12630 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12632 focalLength)) {
12633 rc = BAD_VALUE;
12634 }
12635 }
12636
12637 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12638 uint8_t optStabMode =
12639 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12641 optStabMode)) {
12642 rc = BAD_VALUE;
12643 }
12644 }
12645
12646 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12647 uint8_t videoStabMode =
12648 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12649 LOGD("videoStabMode from APP = %d", videoStabMode);
12650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12651 videoStabMode)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655
12656
12657 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12658 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12660 noiseRedMode)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664
12665 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12666 float reprocessEffectiveExposureFactor =
12667 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12668 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12669 reprocessEffectiveExposureFactor)) {
12670 rc = BAD_VALUE;
12671 }
12672 }
12673
12674 cam_crop_region_t scalerCropRegion;
12675 bool scalerCropSet = false;
12676 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12677 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12678 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12679 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12680 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12681
12682 // Map coordinate system from active array to sensor output.
12683 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12684 scalerCropRegion.width, scalerCropRegion.height);
12685
12686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12687 scalerCropRegion)) {
12688 rc = BAD_VALUE;
12689 }
12690 scalerCropSet = true;
12691 }
12692
12693 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12694 int64_t sensorExpTime =
12695 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12696 LOGD("setting sensorExpTime %lld", sensorExpTime);
12697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12698 sensorExpTime)) {
12699 rc = BAD_VALUE;
12700 }
12701 }
12702
12703 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12704 int64_t sensorFrameDuration =
12705 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012706 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12707 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12708 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12709 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12711 sensorFrameDuration)) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
12716 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12717 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12718 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12719 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12720 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12721 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12722 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12724 sensorSensitivity)) {
12725 rc = BAD_VALUE;
12726 }
12727 }
12728
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012729#ifndef USE_HAL_3_3
12730 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12731 int32_t ispSensitivity =
12732 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12733 if (ispSensitivity <
12734 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12735 ispSensitivity =
12736 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12737 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12738 }
12739 if (ispSensitivity >
12740 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12741 ispSensitivity =
12742 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12743 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12744 }
12745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12746 ispSensitivity)) {
12747 rc = BAD_VALUE;
12748 }
12749 }
12750#endif
12751
Thierry Strudel3d639192016-09-09 11:52:26 -070012752 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12753 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12755 rc = BAD_VALUE;
12756 }
12757 }
12758
12759 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12760 uint8_t fwk_facedetectMode =
12761 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12762
12763 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12764 fwk_facedetectMode);
12765
12766 if (NAME_NOT_FOUND != val) {
12767 uint8_t facedetectMode = (uint8_t)val;
12768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12769 facedetectMode)) {
12770 rc = BAD_VALUE;
12771 }
12772 }
12773 }
12774
Thierry Strudel54dc9782017-02-15 12:12:10 -080012775 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012776 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012777 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012778 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12779 histogramMode)) {
12780 rc = BAD_VALUE;
12781 }
12782 }
12783
12784 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12785 uint8_t sharpnessMapMode =
12786 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12788 sharpnessMapMode)) {
12789 rc = BAD_VALUE;
12790 }
12791 }
12792
12793 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12794 uint8_t tonemapMode =
12795 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12797 rc = BAD_VALUE;
12798 }
12799 }
12800 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12801 /*All tonemap channels will have the same number of points*/
12802 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12803 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12804 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12805 cam_rgb_tonemap_curves tonemapCurves;
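        // Each framework tonemap curve is a flat list of (Pin, Pout) pairs, so the
        // number of curve points is half the metadata entry count.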
12806 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12807 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12808 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12809 tonemapCurves.tonemap_points_cnt,
12810 CAM_MAX_TONEMAP_CURVE_SIZE);
12811 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12812 }
12813
12814 /* ch0 = G*/
12815 size_t point = 0;
12816 cam_tonemap_curve_t tonemapCurveGreen;
12817 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12818 for (size_t j = 0; j < 2; j++) {
12819 tonemapCurveGreen.tonemap_points[i][j] =
12820 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12821 point++;
12822 }
12823 }
12824 tonemapCurves.curves[0] = tonemapCurveGreen;
12825
12826 /* ch 1 = B */
12827 point = 0;
12828 cam_tonemap_curve_t tonemapCurveBlue;
12829 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12830 for (size_t j = 0; j < 2; j++) {
12831 tonemapCurveBlue.tonemap_points[i][j] =
12832 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12833 point++;
12834 }
12835 }
12836 tonemapCurves.curves[1] = tonemapCurveBlue;
12837
12838 /* ch 2 = R */
12839 point = 0;
12840 cam_tonemap_curve_t tonemapCurveRed;
12841 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12842 for (size_t j = 0; j < 2; j++) {
12843 tonemapCurveRed.tonemap_points[i][j] =
12844 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12845 point++;
12846 }
12847 }
12848 tonemapCurves.curves[2] = tonemapCurveRed;
12849
12850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12851 tonemapCurves)) {
12852 rc = BAD_VALUE;
12853 }
12854 }
12855
12856 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12857 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12858 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12859 captureIntent)) {
12860 rc = BAD_VALUE;
12861 }
12862 }
12863
12864 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12865 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12866 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12867 blackLevelLock)) {
12868 rc = BAD_VALUE;
12869 }
12870 }
12871
12872 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12873 uint8_t lensShadingMapMode =
12874 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12876 lensShadingMapMode)) {
12877 rc = BAD_VALUE;
12878 }
12879 }
12880
12881 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12882 cam_area_t roi;
12883 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012884 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012885
12886 // Map coordinate system from active array to sensor output.
12887 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12888 roi.rect.height);
12889
12890 if (scalerCropSet) {
12891 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12892 }
12893 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12894 rc = BAD_VALUE;
12895 }
12896 }
12897
12898 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12899 cam_area_t roi;
12900 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012901 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012902
12903 // Map coordinate system from active array to sensor output.
12904 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12905 roi.rect.height);
12906
12907 if (scalerCropSet) {
12908 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12909 }
12910 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12911 rc = BAD_VALUE;
12912 }
12913 }
12914
12915 // CDS for non-HFR non-video mode
12916 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12917 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12918 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12919 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12920 LOGE("Invalid CDS mode %d!", *fwk_cds);
12921 } else {
12922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12923 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12924 rc = BAD_VALUE;
12925 }
12926 }
12927 }
12928
Thierry Strudel04e026f2016-10-10 11:27:36 -070012929 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012930 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012931 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012932 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12933 }
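    // The session-level video HDR flag, once set, overrides the per-request setting.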
12934 if (m_bVideoHdrEnabled)
12935 vhdr = CAM_VIDEO_HDR_MODE_ON;
12936
Thierry Strudel54dc9782017-02-15 12:12:10 -080012937 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12938
12939 if(vhdr != curr_hdr_state)
12940 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12941
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012942 rc = setVideoHdrMode(mParameters, vhdr);
12943 if (rc != NO_ERROR) {
12944 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012945 }
12946
12947 //IR
12948 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12949 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12950 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012951 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12952 uint8_t isIRon = 0;
12953
12954        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012955 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12956 LOGE("Invalid IR mode %d!", fwk_ir);
12957 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012958 if(isIRon != curr_ir_state )
12959 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12960
Thierry Strudel04e026f2016-10-10 11:27:36 -070012961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12962 CAM_INTF_META_IR_MODE, fwk_ir)) {
12963 rc = BAD_VALUE;
12964 }
12965 }
12966 }
12967
Thierry Strudel54dc9782017-02-15 12:12:10 -080012968 //Binning Correction Mode
12969 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12970 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12971 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12972 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12973 || (0 > fwk_binning_correction)) {
12974 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12975 } else {
12976 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12977 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12978 rc = BAD_VALUE;
12979 }
12980 }
12981 }
12982
Thierry Strudel269c81a2016-10-12 12:13:59 -070012983 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12984 float aec_speed;
12985 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12986 LOGD("AEC Speed :%f", aec_speed);
12987 if ( aec_speed < 0 ) {
12988 LOGE("Invalid AEC mode %f!", aec_speed);
12989 } else {
12990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12991 aec_speed)) {
12992 rc = BAD_VALUE;
12993 }
12994 }
12995 }
12996
12997 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12998 float awb_speed;
12999 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
13000 LOGD("AWB Speed :%f", awb_speed);
13001 if ( awb_speed < 0 ) {
13002 LOGE("Invalid AWB mode %f!", awb_speed);
13003 } else {
13004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13005 awb_speed)) {
13006 rc = BAD_VALUE;
13007 }
13008 }
13009 }
13010
Thierry Strudel3d639192016-09-09 11:52:26 -070013011 // TNR
13012 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13013 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13014 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013015 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070013016 cam_denoise_param_t tnr;
13017 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13018 tnr.process_plates =
13019 (cam_denoise_process_type_t)frame_settings.find(
13020 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13021 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013022
13023 if(b_TnrRequested != curr_tnr_state)
13024 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13025
Thierry Strudel3d639192016-09-09 11:52:26 -070013026 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13027 rc = BAD_VALUE;
13028 }
13029 }
13030
Thierry Strudel54dc9782017-02-15 12:12:10 -080013031 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013032 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013033 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13035 *exposure_metering_mode)) {
13036 rc = BAD_VALUE;
13037 }
13038 }
13039
Thierry Strudel3d639192016-09-09 11:52:26 -070013040 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13041 int32_t fwk_testPatternMode =
13042 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13043 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13044 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13045
13046 if (NAME_NOT_FOUND != testPatternMode) {
13047 cam_test_pattern_data_t testPatternData;
13048 memset(&testPatternData, 0, sizeof(testPatternData));
13049 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13050 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13051 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13052 int32_t *fwk_testPatternData =
13053 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13054 testPatternData.r = fwk_testPatternData[0];
13055 testPatternData.b = fwk_testPatternData[3];
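                // The framework supplies SOLID_COLOR data as [R, Geven, Godd, B]; map the
                // two green channels onto Gr/Gb based on the sensor's color filter arrangement.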
13056 switch (gCamCapability[mCameraId]->color_arrangement) {
13057 case CAM_FILTER_ARRANGEMENT_RGGB:
13058 case CAM_FILTER_ARRANGEMENT_GRBG:
13059 testPatternData.gr = fwk_testPatternData[1];
13060 testPatternData.gb = fwk_testPatternData[2];
13061 break;
13062 case CAM_FILTER_ARRANGEMENT_GBRG:
13063 case CAM_FILTER_ARRANGEMENT_BGGR:
13064 testPatternData.gr = fwk_testPatternData[2];
13065 testPatternData.gb = fwk_testPatternData[1];
13066 break;
13067 default:
13068 LOGE("color arrangement %d is not supported",
13069 gCamCapability[mCameraId]->color_arrangement);
13070 break;
13071 }
13072 }
13073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13074 testPatternData)) {
13075 rc = BAD_VALUE;
13076 }
13077 } else {
13078 LOGE("Invalid framework sensor test pattern mode %d",
13079 fwk_testPatternMode);
13080 }
13081 }
13082
13083 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13084 size_t count = 0;
13085 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13086 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13087 gps_coords.data.d, gps_coords.count, count);
13088 if (gps_coords.count != count) {
13089 rc = BAD_VALUE;
13090 }
13091 }
13092
13093 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13094 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13095 size_t count = 0;
13096 const char *gps_methods_src = (const char *)
13097 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13098 memset(gps_methods, '\0', sizeof(gps_methods));
13099 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13100 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13101 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13102 if (GPS_PROCESSING_METHOD_SIZE != count) {
13103 rc = BAD_VALUE;
13104 }
13105 }
13106
13107 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13108 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13110 gps_timestamp)) {
13111 rc = BAD_VALUE;
13112 }
13113 }
13114
13115 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13116 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13117 cam_rotation_info_t rotation_info;
13118 if (orientation == 0) {
13119 rotation_info.rotation = ROTATE_0;
13120 } else if (orientation == 90) {
13121 rotation_info.rotation = ROTATE_90;
13122 } else if (orientation == 180) {
13123 rotation_info.rotation = ROTATE_180;
13124 } else if (orientation == 270) {
13125 rotation_info.rotation = ROTATE_270;
13126        } else {
            // Defensive default; the framework is expected to send only 0/90/180/270.
            rotation_info.rotation = ROTATE_0;
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013127 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013128 rotation_info.streamId = snapshotStreamId;
13129 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13131 rc = BAD_VALUE;
13132 }
13133 }
13134
13135 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13136 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13138 rc = BAD_VALUE;
13139 }
13140 }
13141
13142 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13143 uint32_t thumb_quality = (uint32_t)
13144 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13146 thumb_quality)) {
13147 rc = BAD_VALUE;
13148 }
13149 }
13150
13151 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13152 cam_dimension_t dim;
13153 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13154 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13156 rc = BAD_VALUE;
13157 }
13158 }
13159
13160 // Internal metadata
13161 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13162 size_t count = 0;
13163 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13164 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13165 privatedata.data.i32, privatedata.count, count);
13166 if (privatedata.count != count) {
13167 rc = BAD_VALUE;
13168 }
13169 }
13170
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013171 // ISO/Exposure Priority
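    // ZSL is enabled only while ISO or exposure-time priority is selected; when the
    // priority keys are absent it is explicitly turned off for this request.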
13172 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13173 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13174 cam_priority_mode_t mode =
13175 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13176 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13177 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13178 use_iso_exp_pty.previewOnly = FALSE;
13179 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13180 use_iso_exp_pty.value = *ptr;
13181
13182 if(CAM_ISO_PRIORITY == mode) {
13183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13184 use_iso_exp_pty)) {
13185 rc = BAD_VALUE;
13186 }
13187 }
13188 else {
13189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13190 use_iso_exp_pty)) {
13191 rc = BAD_VALUE;
13192 }
13193 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013194
13195 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13196 rc = BAD_VALUE;
13197 }
13198 }
13199 } else {
13200 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13201 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013202 }
13203 }
13204
13205 // Saturation
13206 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13207 int32_t* use_saturation =
13208 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13210 rc = BAD_VALUE;
13211 }
13212 }
13213
Thierry Strudel3d639192016-09-09 11:52:26 -070013214 // EV step
13215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13216 gCamCapability[mCameraId]->exp_compensation_step)) {
13217 rc = BAD_VALUE;
13218 }
13219
13220 // CDS info
13221 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13222 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13223 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13224
13225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13226 CAM_INTF_META_CDS_DATA, *cdsData)) {
13227 rc = BAD_VALUE;
13228 }
13229 }
13230
Shuzhen Wang19463d72016-03-08 11:09:52 -080013231 // Hybrid AE
13232 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13233 uint8_t *hybrid_ae = (uint8_t *)
13234 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013235 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13236 rc = BAD_VALUE;
13237 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013238 }
13239
Shuzhen Wang14415f52016-11-16 18:26:18 -080013240 // Histogram
13241 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13242 uint8_t histogramMode =
13243 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13245 histogramMode)) {
13246 rc = BAD_VALUE;
13247 }
13248 }
13249
13250 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13251 int32_t histogramBins =
13252 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13254 histogramBins)) {
13255 rc = BAD_VALUE;
13256 }
13257 }
13258
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013259 // Tracking AF
13260 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13261 uint8_t trackingAfTrigger =
13262 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13263 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13264 trackingAfTrigger)) {
13265 rc = BAD_VALUE;
13266 }
13267 }
13268
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013269 // Makernote
13270 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13271 if (entry.count != 0) {
13272 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13273 cam_makernote_t makernote;
13274 makernote.length = entry.count;
13275 memcpy(makernote.data, entry.data.u8, makernote.length);
13276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13277 rc = BAD_VALUE;
13278 }
13279 } else {
13280 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13281 MAX_MAKERNOTE_LENGTH);
13282 rc = BAD_VALUE;
13283 }
13284 }
13285
Thierry Strudel3d639192016-09-09 11:52:26 -070013286 return rc;
13287}
13288
13289/*===========================================================================
13290 * FUNCTION : captureResultCb
13291 *
13292 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13293 *
13294 * PARAMETERS :
13295 * @frame : frame information from mm-camera-interface
13296 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13297 * @userdata: userdata
13298 *
13299 * RETURN : NONE
13300 *==========================================================================*/
13301void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13302 camera3_stream_buffer_t *buffer,
13303 uint32_t frame_number, bool isInputBuffer, void *userdata)
13304{
13305 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13306 if (hw == NULL) {
13307 LOGE("Invalid hw %p", hw);
13308 return;
13309 }
13310
13311 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13312 return;
13313}
13314
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013315/*===========================================================================
13316 * FUNCTION : setBufferErrorStatus
13317 *
13318 * DESCRIPTION: Callback handler for channels to report any buffer errors
13319 *
13320 * PARAMETERS :
13321 * @ch : Channel on which buffer error is reported from
13322 * @frame_number : frame number on which buffer error is reported on
13323 * @buffer_status : buffer error status
13324 * @userdata: userdata
13325 *
13326 * RETURN : NONE
13327 *==========================================================================*/
13328void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13329 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13330{
13331 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13332 if (hw == NULL) {
13333 LOGE("Invalid hw %p", hw);
13334 return;
13335 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013336
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013337 hw->setBufferErrorStatus(ch, frame_number, err);
13338 return;
13339}
13340
13341void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13342 uint32_t frameNumber, camera3_buffer_status_t err)
13343{
13344 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13345 pthread_mutex_lock(&mMutex);
13346
13347 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
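    // Mark every pending buffer of this frame that belongs to the reporting channel as
    // ERROR; the error status is propagated when the buffer is later returned.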
13348 if (req.frame_number != frameNumber)
13349 continue;
13350 for (auto& k : req.mPendingBufferList) {
13351 if(k.stream->priv == ch) {
13352 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13353 }
13354 }
13355 }
13356
13357 pthread_mutex_unlock(&mMutex);
13358 return;
13359}
Thierry Strudel3d639192016-09-09 11:52:26 -070013360/*===========================================================================
13361 * FUNCTION : initialize
13362 *
13363 * DESCRIPTION: Pass framework callback pointers to HAL
13364 *
13365 * PARAMETERS :
13366 *   @device       : camera3 device handle
13367 *   @callback_ops : callback function pointers from the framework
13368 * RETURN : Success : 0
13369 * Failure: -ENODEV
13370 *==========================================================================*/
13371
13372int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13373 const camera3_callback_ops_t *callback_ops)
13374{
13375 LOGD("E");
13376 QCamera3HardwareInterface *hw =
13377 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13378 if (!hw) {
13379 LOGE("NULL camera device");
13380 return -ENODEV;
13381 }
13382
13383 int rc = hw->initialize(callback_ops);
13384 LOGD("X");
13385 return rc;
13386}
13387
13388/*===========================================================================
13389 * FUNCTION : configure_streams
13390 *
13391 * DESCRIPTION: Configure the set of output streams for the camera session
13392 *
13393 * PARAMETERS :
13394 *   @device      : camera3 device handle
13395 *   @stream_list : streams to be configured
13396 * RETURN : Success: 0
13397 * Failure: -EINVAL (if stream configuration is invalid)
13398 * -ENODEV (fatal error)
13399 *==========================================================================*/
13400
13401int QCamera3HardwareInterface::configure_streams(
13402 const struct camera3_device *device,
13403 camera3_stream_configuration_t *stream_list)
13404{
13405 LOGD("E");
13406 QCamera3HardwareInterface *hw =
13407 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13408 if (!hw) {
13409 LOGE("NULL camera device");
13410 return -ENODEV;
13411 }
13412 int rc = hw->configureStreams(stream_list);
13413 LOGD("X");
13414 return rc;
13415}
13416
13417/*===========================================================================
13418 * FUNCTION : construct_default_request_settings
13419 *
13420 * DESCRIPTION: Configure a settings buffer to meet the required use case
13421 *
13422 * PARAMETERS :
13423 *
13424 *
13425 * RETURN : Success: Return valid metadata
13426 * Failure: Return NULL
13427 *==========================================================================*/
13428const camera_metadata_t* QCamera3HardwareInterface::
13429 construct_default_request_settings(const struct camera3_device *device,
13430 int type)
13431{
13432
13433 LOGD("E");
13434 camera_metadata_t* fwk_metadata = NULL;
13435 QCamera3HardwareInterface *hw =
13436 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13437 if (!hw) {
13438 LOGE("NULL camera device");
13439 return NULL;
13440 }
13441
13442 fwk_metadata = hw->translateCapabilityToMetadata(type);
13443
13444 LOGD("X");
13445 return fwk_metadata;
13446}
13447
13448/*===========================================================================
13449 * FUNCTION : process_capture_request
13450 *
13451 * DESCRIPTION: Forward a capture request from the framework to the HAL instance
13452 *
13453 * PARAMETERS :
13454 *   @device  : camera3 device handle
13455 *   @request : capture request to process
13456 * RETURN     : 0 on success, negative error code on failure
13457 *==========================================================================*/
13458int QCamera3HardwareInterface::process_capture_request(
13459 const struct camera3_device *device,
13460 camera3_capture_request_t *request)
13461{
13462 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013463 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013464 QCamera3HardwareInterface *hw =
13465 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13466 if (!hw) {
13467 LOGE("NULL camera device");
13468 return -EINVAL;
13469 }
13470
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013471 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013472 LOGD("X");
13473 return rc;
13474}
13475
13476/*===========================================================================
13477 * FUNCTION : dump
13478 *
13479 * DESCRIPTION: Dump HAL state in response to "adb shell dumpsys media.camera"
13480 *
13481 * PARAMETERS :
13482 *   @device : camera3 device handle
13483 *   @fd     : file descriptor to write the dump to
13484 * RETURN    : None
13485 *==========================================================================*/
13486
13487void QCamera3HardwareInterface::dump(
13488 const struct camera3_device *device, int fd)
13489{
13490 /* Log level property is read when "adb shell dumpsys media.camera" is
13491 called so that the log level can be controlled without restarting
13492 the media server */
13493 getLogLevel();
13494
13495 LOGD("E");
13496 QCamera3HardwareInterface *hw =
13497 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13498 if (!hw) {
13499 LOGE("NULL camera device");
13500 return;
13501 }
13502
13503 hw->dump(fd);
13504 LOGD("X");
13505 return;
13506}
13507
13508/*===========================================================================
13509 * FUNCTION : flush
13510 *
13511 * DESCRIPTION: Flush all in-flight captures and return buffers to the framework
13512 *
13513 * PARAMETERS :
13514 *   @device : camera3 device handle
13515 *
13516 * RETURN : 0 on success (or when not started), -EINVAL on invalid device, -ENODEV on fatal error
13517 *==========================================================================*/
13518
13519int QCamera3HardwareInterface::flush(
13520 const struct camera3_device *device)
13521{
13522 int rc;
13523 LOGD("E");
13524 QCamera3HardwareInterface *hw =
13525 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13526 if (!hw) {
13527 LOGE("NULL camera device");
13528 return -EINVAL;
13529 }
13530
13531 pthread_mutex_lock(&hw->mMutex);
13532 // Validate current state
13533 switch (hw->mState) {
13534 case STARTED:
13535 /* valid state */
13536 break;
13537
13538 case ERROR:
13539 pthread_mutex_unlock(&hw->mMutex);
13540 hw->handleCameraDeviceError();
13541 return -ENODEV;
13542
13543 default:
13544 LOGI("Flush returned during state %d", hw->mState);
13545 pthread_mutex_unlock(&hw->mMutex);
13546 return 0;
13547 }
13548 pthread_mutex_unlock(&hw->mMutex);
13549
13550 rc = hw->flush(true /* restart channels */ );
13551 LOGD("X");
13552 return rc;
13553}
13554
13555/*===========================================================================
13556 * FUNCTION : close_camera_device
13557 *
13558 * DESCRIPTION: Close the camera device and free the HAL instance
13559 *
13560 * PARAMETERS :
13561 *   @device : hw_device_t handle of the camera to close
13562 *
13563 * RETURN : NO_ERROR on success, BAD_VALUE on invalid device
13564 *==========================================================================*/
13565int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13566{
13567 int ret = NO_ERROR;
13568 QCamera3HardwareInterface *hw =
13569 reinterpret_cast<QCamera3HardwareInterface *>(
13570 reinterpret_cast<camera3_device_t *>(device)->priv);
13571 if (!hw) {
13572 LOGE("NULL camera device");
13573 return BAD_VALUE;
13574 }
13575
13576 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13577 delete hw;
13578 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013579 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013580 return ret;
13581}
13582
13583/*===========================================================================
13584 * FUNCTION : getWaveletDenoiseProcessPlate
13585 *
13586 * DESCRIPTION: query wavelet denoise process plate
13587 *
13588 * PARAMETERS : None
13589 *
13590 * RETURN : WNR process plate value
13591 *==========================================================================*/
13592cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13593{
13594 char prop[PROPERTY_VALUE_MAX];
13595 memset(prop, 0, sizeof(prop));
13596 property_get("persist.denoise.process.plates", prop, "0");
13597 int processPlate = atoi(prop);
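    // Property value mapping: 0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr,
    // 3 = streamlined CbCr; any other value falls back to streamlined YCbCr.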
13598 switch(processPlate) {
13599 case 0:
13600 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13601 case 1:
13602 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13603 case 2:
13604 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13605 case 3:
13606 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13607 default:
13608 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13609 }
13610}
13611
13612
13613/*===========================================================================
13614 * FUNCTION : getTemporalDenoiseProcessPlate
13615 *
13616 * DESCRIPTION: query temporal denoise process plate
13617 *
13618 * PARAMETERS : None
13619 *
13620 * RETURN : TNR process plate value
13621 *==========================================================================*/
13622cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13623{
13624 char prop[PROPERTY_VALUE_MAX];
13625 memset(prop, 0, sizeof(prop));
13626 property_get("persist.tnr.process.plates", prop, "0");
13627 int processPlate = atoi(prop);
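    // Same property value mapping as the wavelet denoise plate above.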
13628 switch(processPlate) {
13629 case 0:
13630 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13631 case 1:
13632 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13633 case 2:
13634 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13635 case 3:
13636 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13637 default:
13638 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13639 }
13640}
13641
13642
13643/*===========================================================================
13644 * FUNCTION : extractSceneMode
13645 *
13646 * DESCRIPTION: Extract scene mode from frameworks set metadata
13647 *
13648 * PARAMETERS :
13649 * @frame_settings: CameraMetadata reference
13650 * @metaMode: ANDROID_CONTROL_MODE
13651 * @hal_metadata: hal metadata structure
13652 *
13653 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13654 *==========================================================================*/
13655int32_t QCamera3HardwareInterface::extractSceneMode(
13656 const CameraMetadata &frame_settings, uint8_t metaMode,
13657 metadata_buffer_t *hal_metadata)
13658{
13659 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013660 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13661
13662 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13663 LOGD("Ignoring control mode OFF_KEEP_STATE");
13664 return NO_ERROR;
13665 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013666
13667 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13668 camera_metadata_ro_entry entry =
13669 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13670 if (0 == entry.count)
13671 return rc;
13672
13673 uint8_t fwk_sceneMode = entry.data.u8[0];
13674
13675 int val = lookupHalName(SCENE_MODES_MAP,
13676 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13677 fwk_sceneMode);
13678 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013679 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013680 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013681 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013682 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013683
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013684 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13685 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13686 }
13687
13688 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13689        if (sceneMode == CAM_SCENE_MODE_HDR) { // HAL enum, consistent with the check above
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013690 cam_hdr_param_t hdr_params;
13691 hdr_params.hdr_enable = 1;
13692 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13693 hdr_params.hdr_need_1x = false;
13694 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13695 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13696 rc = BAD_VALUE;
13697 }
13698 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013699
Thierry Strudel3d639192016-09-09 11:52:26 -070013700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13701 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13702 rc = BAD_VALUE;
13703 }
13704 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013705
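    // mForceHdrSnapshot forces multi-frame HDR bracketing regardless of the requested scene mode.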
13706 if (mForceHdrSnapshot) {
13707 cam_hdr_param_t hdr_params;
13708 hdr_params.hdr_enable = 1;
13709 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13710 hdr_params.hdr_need_1x = false;
13711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13712 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13713 rc = BAD_VALUE;
13714 }
13715 }
13716
Thierry Strudel3d639192016-09-09 11:52:26 -070013717 return rc;
13718}
13719
13720/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013721 * FUNCTION : setVideoHdrMode
13722 *
13723 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13724 *
13725 * PARAMETERS :
13726 * @hal_metadata: hal metadata structure
13727 * @vhdr : requested video HDR mode (from QCAMERA3_VIDEO_HDR_MODE)
13728 *
13729 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13730 *==========================================================================*/
13731int32_t QCamera3HardwareInterface::setVideoHdrMode(
13732 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13733{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013734 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13735 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13736 }
13737
13738 LOGE("Invalid Video HDR mode %d!", vhdr);
13739 return BAD_VALUE;
13740}
13741
13742/*===========================================================================
13743 * FUNCTION : setSensorHDR
13744 *
13745 * DESCRIPTION: Enable/disable sensor HDR.
13746 *
13747 * PARAMETERS :
13748 * @hal_metadata: hal metadata structure
13749 * @enable: boolean whether to enable/disable sensor HDR
13750 *
13751 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13752 *==========================================================================*/
13753int32_t QCamera3HardwareInterface::setSensorHDR(
13754 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13755{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013756 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013757 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13758
13759 if (enable) {
13760 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13761 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13762 #ifdef _LE_CAMERA_
13763 //Default to staggered HDR for IOT
13764 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13765 #else
13766 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13767 #endif
13768 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13769 }
13770
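    // The persist.camera.sensor.hdr value maps directly onto cam_sensor_hdr_type_t;
    // the requested type is then validated against the capability mask below.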
13771 bool isSupported = false;
13772 switch (sensor_hdr) {
13773 case CAM_SENSOR_HDR_IN_SENSOR:
13774 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13775 CAM_QCOM_FEATURE_SENSOR_HDR) {
13776 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013777 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013778 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013779 break;
13780 case CAM_SENSOR_HDR_ZIGZAG:
13781 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13782 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13783 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013784 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013785 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013786 break;
13787 case CAM_SENSOR_HDR_STAGGERED:
13788 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13789 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13790 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013791 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013792 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013793 break;
13794 case CAM_SENSOR_HDR_OFF:
13795 isSupported = true;
13796 LOGD("Turning off sensor HDR");
13797 break;
13798 default:
13799 LOGE("HDR mode %d not supported", sensor_hdr);
13800 rc = BAD_VALUE;
13801 break;
13802 }
13803
13804 if(isSupported) {
13805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13806 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13807 rc = BAD_VALUE;
13808 } else {
13809 if(!isVideoHdrEnable)
13810 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013811 }
13812 }
13813 return rc;
13814}
13815
13816/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013817 * FUNCTION : needRotationReprocess
13818 *
13819 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13820 *
13821 * PARAMETERS : none
13822 *
13823 * RETURN : true: needed
13824 * false: no need
13825 *==========================================================================*/
13826bool QCamera3HardwareInterface::needRotationReprocess()
13827{
13828 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13829        // pp has the capability to process rotation, so rotation is handled in reprocess
13830 LOGH("need do reprocess for rotation");
13831 return true;
13832 }
13833
13834 return false;
13835}
13836
13837/*===========================================================================
13838 * FUNCTION : needReprocess
13839 *
13840 * DESCRIPTION: if reprocess is needed
13841 *
13842 * PARAMETERS : none
13843 *
13844 * RETURN : true: needed
13845 * false: no need
13846 *==========================================================================*/
13847bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13848{
13849 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13850 // TODO: add for ZSL HDR later
13851 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13852 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13853 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13854 return true;
13855 } else {
13856 LOGH("already post processed frame");
13857 return false;
13858 }
13859 }
13860 return needRotationReprocess();
13861}
13862
13863/*===========================================================================
13864 * FUNCTION : needJpegExifRotation
13865 *
13866 * DESCRIPTION: whether rotation via JPEG EXIF is needed
13867 *
13868 * PARAMETERS : none
13869 *
13870 * RETURN : true: needed
13871 * false: no need
13872 *==========================================================================*/
13873bool QCamera3HardwareInterface::needJpegExifRotation()
13874{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013875 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013876 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13877 LOGD("Need use Jpeg EXIF Rotation");
13878 return true;
13879 }
13880 return false;
13881}
13882
13883/*===========================================================================
13884 * FUNCTION : addOfflineReprocChannel
13885 *
13886 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13887 * coming from input channel
13888 *
13889 * PARAMETERS :
13890 * @config : reprocess configuration
13891 * @inputChHandle : pointer to the input (source) channel
13892 *
13893 *
13894 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13895 *==========================================================================*/
13896QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13897 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13898{
13899 int32_t rc = NO_ERROR;
13900 QCamera3ReprocessChannel *pChannel = NULL;
13901
13902 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013903 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13904 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013905 if (NULL == pChannel) {
13906 LOGE("no mem for reprocess channel");
13907 return NULL;
13908 }
13909
13910 rc = pChannel->initialize(IS_TYPE_NONE);
13911 if (rc != NO_ERROR) {
13912 LOGE("init reprocess channel failed, ret = %d", rc);
13913 delete pChannel;
13914 return NULL;
13915 }
13916
13917 // pp feature config
13918 cam_pp_feature_config_t pp_config;
13919 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13920
13921 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
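    // Start from the HAL3 post-processing superset, then adjust the mask below based on
    // hardware capability and the reprocess configuration.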
13922 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13923 & CAM_QCOM_FEATURE_DSDN) {
13924        // Use CPP CDS in case the hardware supports it.
13925 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13926 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13927 }
13928 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13929 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13930 }
13931
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013932 if (config.hdr_param.hdr_enable) {
13933 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13934 pp_config.hdr_param = config.hdr_param;
13935 }
13936
13937 if (mForceHdrSnapshot) {
13938 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13939 pp_config.hdr_param.hdr_enable = 1;
13940 pp_config.hdr_param.hdr_need_1x = 0;
13941 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13942 }
13943
Thierry Strudel3d639192016-09-09 11:52:26 -070013944 rc = pChannel->addReprocStreamsFromSource(pp_config,
13945 config,
13946 IS_TYPE_NONE,
13947 mMetadataChannel);
13948
13949 if (rc != NO_ERROR) {
13950 delete pChannel;
13951 return NULL;
13952 }
13953 return pChannel;
13954}
13955
13956/*===========================================================================
13957 * FUNCTION : getMobicatMask
13958 *
13959 * DESCRIPTION: returns mobicat mask
13960 *
13961 * PARAMETERS : none
13962 *
13963 * RETURN : mobicat mask
13964 *
13965 *==========================================================================*/
13966uint8_t QCamera3HardwareInterface::getMobicatMask()
13967{
13968 return m_MobicatMask;
13969}
13970
13971/*===========================================================================
13972 * FUNCTION : setMobicat
13973 *
13974 * DESCRIPTION: set Mobicat on/off.
13975 *
13976 * PARAMETERS :
13977 * @params : none
13978 *
13979 * RETURN : int32_t type of status
13980 * NO_ERROR -- success
13981 *              non-zero failure code
13982 *==========================================================================*/
13983int32_t QCamera3HardwareInterface::setMobicat()
13984{
Thierry Strudel3d639192016-09-09 11:52:26 -070013985 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013986
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013987 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013988 tune_cmd_t tune_cmd;
13989 tune_cmd.type = SET_RELOAD_CHROMATIX;
13990 tune_cmd.module = MODULE_ALL;
13991 tune_cmd.value = TRUE;
13992 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13993 CAM_INTF_PARM_SET_VFE_COMMAND,
13994 tune_cmd);
13995
13996 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13997 CAM_INTF_PARM_SET_PP_COMMAND,
13998 tune_cmd);
13999 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014000
14001 return ret;
14002}
14003
14004/*===========================================================================
14005* FUNCTION : getLogLevel
14006*
14007* DESCRIPTION: Reads the log level property into a variable
14008*
14009* PARAMETERS :
14010* None
14011*
14012* RETURN :
14013* None
14014*==========================================================================*/
14015void QCamera3HardwareInterface::getLogLevel()
14016{
14017 char prop[PROPERTY_VALUE_MAX];
14018 uint32_t globalLogLevel = 0;
14019
14020 property_get("persist.camera.hal.debug", prop, "0");
14021 int val = atoi(prop);
14022 if (0 <= val) {
14023 gCamHal3LogLevel = (uint32_t)val;
14024 }
14025
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014026 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014027 gKpiDebugLevel = atoi(prop);
14028
14029 property_get("persist.camera.global.debug", prop, "0");
14030 val = atoi(prop);
14031 if (0 <= val) {
14032 globalLogLevel = (uint32_t)val;
14033 }
14034
14035 /* Highest log level among hal.logs and global.logs is selected */
14036 if (gCamHal3LogLevel < globalLogLevel)
14037 gCamHal3LogLevel = globalLogLevel;
14038
14039 return;
14040}
14041
14042/*===========================================================================
14043 * FUNCTION : validateStreamRotations
14044 *
14045 * DESCRIPTION: Check if the rotations requested are supported
14046 *
14047 * PARAMETERS :
14048 * @stream_list : streams to be configured
14049 *
14050 * RETURN : NO_ERROR on success
14051 * -EINVAL on failure
14052 *
14053 *==========================================================================*/
14054int QCamera3HardwareInterface::validateStreamRotations(
14055 camera3_stream_configuration_t *streamList)
14056{
14057 int rc = NO_ERROR;
14058
14059 /*
14060 * Loop through all streams requested in configuration
14061 * Check if unsupported rotations have been requested on any of them
14062 */
14063 for (size_t j = 0; j < streamList->num_streams; j++){
14064 camera3_stream_t *newStream = streamList->streams[j];
14065
Emilian Peev35ceeed2017-06-29 11:58:56 -070014066 switch(newStream->rotation) {
14067 case CAMERA3_STREAM_ROTATION_0:
14068 case CAMERA3_STREAM_ROTATION_90:
14069 case CAMERA3_STREAM_ROTATION_180:
14070 case CAMERA3_STREAM_ROTATION_270:
14071 //Expected values
14072 break;
14073 default:
14074 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14075 "type:%d and stream format:%d", __func__,
14076 newStream->rotation, newStream->stream_type,
14077 newStream->format);
14078 return -EINVAL;
14079 }
14080
Thierry Strudel3d639192016-09-09 11:52:26 -070014081 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14082 bool isImplDef = (newStream->format ==
14083 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14084 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14085 isImplDef);
14086
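        // Only implementation-defined, non-ZSL streams can be rotated by the HAL;
        // reject rotation requests on anything else.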
14087 if (isRotated && (!isImplDef || isZsl)) {
14088 LOGE("Error: Unsupported rotation of %d requested for stream"
14089 "type:%d and stream format:%d",
14090 newStream->rotation, newStream->stream_type,
14091 newStream->format);
14092 rc = -EINVAL;
14093 break;
14094 }
14095 }
14096
14097 return rc;
14098}
14099
14100/*===========================================================================
14101* FUNCTION : getFlashInfo
14102*
14103* DESCRIPTION: Retrieve information about whether the device has a flash.
14104*
14105* PARAMETERS :
14106* @cameraId : Camera id to query
14107* @hasFlash : Boolean indicating whether there is a flash device
14108* associated with given camera
14109* @flashNode : If a flash device exists, this will be its device node.
14110*
14111* RETURN :
14112* None
14113*==========================================================================*/
14114void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14115 bool& hasFlash,
14116 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14117{
14118 cam_capability_t* camCapability = gCamCapability[cameraId];
14119 if (NULL == camCapability) {
14120 hasFlash = false;
14121 flashNode[0] = '\0';
14122 } else {
14123 hasFlash = camCapability->flash_available;
14124 strlcpy(flashNode,
14125 (char*)camCapability->flash_dev_name,
14126 QCAMERA_MAX_FILEPATH_LENGTH);
14127 }
14128}
14129
14130/*===========================================================================
14131* FUNCTION : getEepromVersionInfo
14132*
14133* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14134*
14135* PARAMETERS : None
14136*
14137* RETURN : string describing EEPROM version
14138* "\0" if no such info available
14139*==========================================================================*/
14140const char *QCamera3HardwareInterface::getEepromVersionInfo()
14141{
14142 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14143}
14144
14145/*===========================================================================
14146* FUNCTION : getLdafCalib
14147*
14148* DESCRIPTION: Retrieve Laser AF calibration data
14149*
14150* PARAMETERS : None
14151*
14152* RETURN : Two uint32_t describing laser AF calibration data
14153* NULL if none is available.
14154*==========================================================================*/
14155const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14156{
14157 if (mLdafCalibExist) {
14158 return &mLdafCalib[0];
14159 } else {
14160 return NULL;
14161 }
14162}
14163
14164/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014165* FUNCTION : getEaselFwVersion
14166*
14167* DESCRIPTION: Retrieve Easel firmware version
14168*
14169* PARAMETERS : None
14170*
14171* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014172*              NULL if the Easel firmware has not been updated
Arnd Geis082a4d72017-08-24 10:33:07 -070014173*==========================================================================*/
14174const char *QCamera3HardwareInterface::getEaselFwVersion()
14175{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014176 if (mEaselFwUpdated) {
14177 return (const char *)&mEaselFwVersion[0];
14178 } else {
14179 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014180 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014181}
14182
14183/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014184 * FUNCTION : dynamicUpdateMetaStreamInfo
14185 *
14186 * DESCRIPTION: This function:
14187 * (1) stops all the channels
14188 * (2) returns error on pending requests and buffers
14189 * (3) sends metastream_info in setparams
14190 * (4) starts all channels
14191 * This is useful when sensor has to be restarted to apply any
14192 * settings such as frame rate from a different sensor mode
14193 *
14194 * PARAMETERS : None
14195 *
14196 * RETURN : NO_ERROR on success
14197 * Error codes on failure
14198 *
14199 *==========================================================================*/
14200int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14201{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014202 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014203 int rc = NO_ERROR;
14204
14205 LOGD("E");
14206
14207 rc = stopAllChannels();
14208 if (rc < 0) {
14209 LOGE("stopAllChannels failed");
14210 return rc;
14211 }
14212
14213 rc = notifyErrorForPendingRequests();
14214 if (rc < 0) {
14215 LOGE("notifyErrorForPendingRequests failed");
14216 return rc;
14217 }
14218
14219 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14220 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14221 "Format:%d",
14222 mStreamConfigInfo.type[i],
14223 mStreamConfigInfo.stream_sizes[i].width,
14224 mStreamConfigInfo.stream_sizes[i].height,
14225 mStreamConfigInfo.postprocess_mask[i],
14226 mStreamConfigInfo.format[i]);
14227 }
14228
14229 /* Send meta stream info once again so that ISP can start */
14230 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14231 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14232 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14233 mParameters);
14234 if (rc < 0) {
14235        LOGE("Setting meta stream info failed; sensor mode will not change");
14236 }
14237
14238 rc = startAllChannels();
14239 if (rc < 0) {
14240 LOGE("startAllChannels failed");
14241 return rc;
14242 }
14243
14244 LOGD("X");
14245 return rc;
14246}
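/*===========================================================================
 * EXAMPLE (editor's illustrative sketch, not part of the original source):
 * Caller-side use of dynamicUpdateMetaStreamInfo(). The trigger condition
 * below (a frame-rate change that needs a different sensor mode) and the
 * variable name fpsRangeNeedsSensorRestart are assumptions for illustration.
 *
 *     // Inside a QCamera3HardwareInterface method, with mMutex held:
 *     if (fpsRangeNeedsSensorRestart) {
 *         int32_t rc = dynamicUpdateMetaStreamInfo();
 *         if (rc != NO_ERROR) {
 *             LOGE("Sensor mode switch failed: %d", rc);
 *             return rc;
 *         }
 *     }
 *==========================================================================*/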
14247
14248/*===========================================================================
14249 * FUNCTION : stopAllChannels
14250 *
14251 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14252 *
14253 * PARAMETERS : None
14254 *
14255 * RETURN : NO_ERROR on success
14256 * Error codes on failure
14257 *
14258 *==========================================================================*/
14259int32_t QCamera3HardwareInterface::stopAllChannels()
14260{
14261 int32_t rc = NO_ERROR;
14262
14263 LOGD("Stopping all channels");
14264 // Stop the Streams/Channels
14265 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14266 it != mStreamInfo.end(); it++) {
14267 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14268 if (channel) {
14269 channel->stop();
14270 }
14271 (*it)->status = INVALID;
14272 }
14273
14274 if (mSupportChannel) {
14275 mSupportChannel->stop();
14276 }
14277 if (mAnalysisChannel) {
14278 mAnalysisChannel->stop();
14279 }
14280 if (mRawDumpChannel) {
14281 mRawDumpChannel->stop();
14282 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014283 if (mHdrPlusRawSrcChannel) {
14284 mHdrPlusRawSrcChannel->stop();
14285 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014286 if (mMetadataChannel) {
14287        /* If mStreamInfo is not empty, there is a metadata stream */
14288 mMetadataChannel->stop();
14289 }
14290
14291 LOGD("All channels stopped");
14292 return rc;
14293}
14294
14295/*===========================================================================
14296 * FUNCTION : startAllChannels
14297 *
14298 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14299 *
14300 * PARAMETERS : None
14301 *
14302 * RETURN : NO_ERROR on success
14303 * Error codes on failure
14304 *
14305 *==========================================================================*/
14306int32_t QCamera3HardwareInterface::startAllChannels()
14307{
14308 int32_t rc = NO_ERROR;
14309
14310 LOGD("Start all channels ");
14311 // Start the Streams/Channels
14312 if (mMetadataChannel) {
14313        /* If mStreamInfo is not empty, there is a metadata stream */
14314 rc = mMetadataChannel->start();
14315 if (rc < 0) {
14316 LOGE("META channel start failed");
14317 return rc;
14318 }
14319 }
14320 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14321 it != mStreamInfo.end(); it++) {
14322 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14323 if (channel) {
14324 rc = channel->start();
14325 if (rc < 0) {
14326 LOGE("channel start failed");
14327 return rc;
14328 }
14329 }
14330 }
14331 if (mAnalysisChannel) {
14332 mAnalysisChannel->start();
14333 }
14334 if (mSupportChannel) {
14335 rc = mSupportChannel->start();
14336 if (rc < 0) {
14337 LOGE("Support channel start failed");
14338 return rc;
14339 }
14340 }
14341 if (mRawDumpChannel) {
14342 rc = mRawDumpChannel->start();
14343 if (rc < 0) {
14344 LOGE("RAW dump channel start failed");
14345 return rc;
14346 }
14347 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014348 if (mHdrPlusRawSrcChannel) {
14349 rc = mHdrPlusRawSrcChannel->start();
14350 if (rc < 0) {
14351 LOGE("HDR+ RAW channel start failed");
14352 return rc;
14353 }
14354 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014355
14356 LOGD("All channels started");
14357 return rc;
14358}
14359
14360/*===========================================================================
14361 * FUNCTION : notifyErrorForPendingRequests
14362 *
14363 * DESCRIPTION: This function sends error for all the pending requests/buffers
14364 *
14365 * PARAMETERS : None
14366 *
14367 * RETURN : Error codes
14368 * NO_ERROR on success
14369 *
14370 *==========================================================================*/
14371int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14372{
Emilian Peev7650c122017-01-19 08:24:33 -080014373 notifyErrorFoPendingDepthData(mDepthChannel);
14374
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014375 auto pendingRequest = mPendingRequestsList.begin();
14376 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014377
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014378 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14379 // buffers (for which buffers aren't sent yet).
14380 while (pendingRequest != mPendingRequestsList.end() ||
14381 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14382 if (pendingRequest == mPendingRequestsList.end() ||
14383 pendingBuffer->frame_number < pendingRequest->frame_number) {
14384            // If result metadata for this frame was already sent, notify about a buffer error
14385            // and return the buffers with an error status.
14386 for (auto &info : pendingBuffer->mPendingBufferList) {
14387 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014388 camera3_notify_msg_t notify_msg;
14389 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14390 notify_msg.type = CAMERA3_MSG_ERROR;
14391 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014392 notify_msg.message.error.error_stream = info.stream;
14393 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014394 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014395
14396 camera3_stream_buffer_t buffer = {};
14397 buffer.acquire_fence = -1;
14398 buffer.release_fence = -1;
14399 buffer.buffer = info.buffer;
14400 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14401 buffer.stream = info.stream;
14402 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014403 }
14404
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014405 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14406 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14407 pendingBuffer->frame_number > pendingRequest->frame_number) {
14408 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014409 camera3_notify_msg_t notify_msg;
14410 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14411 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014412 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14413 notify_msg.message.error.error_stream = nullptr;
14414 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014415 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014416
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014417 if (pendingRequest->input_buffer != nullptr) {
14418 camera3_capture_result result = {};
14419 result.frame_number = pendingRequest->frame_number;
14420 result.result = nullptr;
14421 result.input_buffer = pendingRequest->input_buffer;
14422 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014423 }
14424
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014425 mShutterDispatcher.clear(pendingRequest->frame_number);
14426 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14427 } else {
14428 // If both buffers and result metadata weren't sent yet, notify about a request error
14429            // If neither the buffers nor the result metadata were sent yet, notify about a
14430            // request error and return the buffers with an error status.
14431 camera3_notify_msg_t notify_msg;
14432 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14433 notify_msg.type = CAMERA3_MSG_ERROR;
14434 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14435 notify_msg.message.error.error_stream = info.stream;
14436 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14437 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014438
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014439 camera3_stream_buffer_t buffer = {};
14440 buffer.acquire_fence = -1;
14441 buffer.release_fence = -1;
14442 buffer.buffer = info.buffer;
14443 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14444 buffer.stream = info.stream;
14445 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14446 }
14447
14448 if (pendingRequest->input_buffer != nullptr) {
14449 camera3_capture_result result = {};
14450 result.frame_number = pendingRequest->frame_number;
14451 result.result = nullptr;
14452 result.input_buffer = pendingRequest->input_buffer;
14453 orchestrateResult(&result);
14454 }
14455
14456 mShutterDispatcher.clear(pendingRequest->frame_number);
14457 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14458 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014459 }
14460 }
14461
14462 /* Reset pending frame Drop list and requests list */
14463 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014464 mShutterDispatcher.clear();
14465 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014466 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014467 mExpectedFrameDuration = 0;
14468 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014469 LOGH("Cleared all the pending buffers ");
14470
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014471 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014472}
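/*===========================================================================
 * EXAMPLE (editor's summary sketch, not part of the original source):
 * How the three branches above map onto camera3 error notifications,
 * restated from the code for quick reference:
 *
 *     result metadata sent, buffers pending  -> CAMERA3_MSG_ERROR_BUFFER,
 *                                               buffers returned with
 *                                               CAMERA3_BUFFER_STATUS_ERROR
 *     buffers sent, result metadata pending  -> CAMERA3_MSG_ERROR_RESULT
 *     neither sent yet                       -> CAMERA3_MSG_ERROR_REQUEST,
 *                                               buffers returned with
 *                                               CAMERA3_BUFFER_STATUS_ERROR
 *==========================================================================*/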
14473
14474bool QCamera3HardwareInterface::isOnEncoder(
14475 const cam_dimension_t max_viewfinder_size,
14476 uint32_t width, uint32_t height)
14477{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014478 return ((width > (uint32_t)max_viewfinder_size.width) ||
14479 (height > (uint32_t)max_viewfinder_size.height) ||
14480 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14481 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014482}
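/*===========================================================================
 * EXAMPLE (editor's illustrative sketch, not part of the original source):
 * Worked results for isOnEncoder(), assuming max_viewfinder_size is
 * 1920x1080 (the actual value comes from the capability table):
 *
 *     isOnEncoder(max_vf, 1280,  720)  -> false (fits the viewfinder path)
 *     isOnEncoder(max_vf, 4032, 3024)  -> true  (exceeds max viewfinder size)
 *     isOnEncoder(max_vf, 3840, 2160)  -> true  (4K goes on the encoder path)
 *==========================================================================*/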
14483
14484/*===========================================================================
14485 * FUNCTION : setBundleInfo
14486 *
14487 * DESCRIPTION: Set bundle info for all streams that are bundled.
14488 *
14489 * PARAMETERS : None
14490 *
14491 * RETURN : NO_ERROR on success
14492 * Error codes on failure
14493 *==========================================================================*/
14494int32_t QCamera3HardwareInterface::setBundleInfo()
14495{
14496 int32_t rc = NO_ERROR;
14497
14498 if (mChannelHandle) {
14499 cam_bundle_config_t bundleInfo;
14500 memset(&bundleInfo, 0, sizeof(bundleInfo));
14501 rc = mCameraHandle->ops->get_bundle_info(
14502 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14503 if (rc != NO_ERROR) {
14504 LOGE("get_bundle_info failed");
14505 return rc;
14506 }
14507 if (mAnalysisChannel) {
14508 mAnalysisChannel->setBundleInfo(bundleInfo);
14509 }
14510 if (mSupportChannel) {
14511 mSupportChannel->setBundleInfo(bundleInfo);
14512 }
14513 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14514 it != mStreamInfo.end(); it++) {
14515 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14516 channel->setBundleInfo(bundleInfo);
14517 }
14518 if (mRawDumpChannel) {
14519 mRawDumpChannel->setBundleInfo(bundleInfo);
14520 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014521 if (mHdrPlusRawSrcChannel) {
14522 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14523 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014524 }
14525
14526 return rc;
14527}
14528
14529/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014530 * FUNCTION : setInstantAEC
14531 *
14532 * DESCRIPTION: Set Instant AEC related params.
14533 *
14534 * PARAMETERS :
14535 * @meta: CameraMetadata reference
14536 *
14537 * RETURN : NO_ERROR on success
14538 * Error codes on failure
14539 *==========================================================================*/
14540int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14541{
14542 int32_t rc = NO_ERROR;
14543 uint8_t val = 0;
14544 char prop[PROPERTY_VALUE_MAX];
14545
14546 // First try to configure instant AEC from framework metadata
14547 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14548 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14549 }
14550
14551    // If the framework did not set this value, try to read it from the system property.
14552 if (val == 0) {
14553 memset(prop, 0, sizeof(prop));
14554 property_get("persist.camera.instant.aec", prop, "0");
14555 val = (uint8_t)atoi(prop);
14556 }
14557
14558 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14559 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14560 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14561 mInstantAEC = val;
14562 mInstantAECSettledFrameNumber = 0;
14563 mInstantAecFrameIdxCount = 0;
14564 LOGH("instantAEC value set %d",val);
14565 if (mInstantAEC) {
14566 memset(prop, 0, sizeof(prop));
14567 property_get("persist.camera.ae.instant.bound", prop, "10");
14568 int32_t aec_frame_skip_cnt = atoi(prop);
14569 if (aec_frame_skip_cnt >= 0) {
14570 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14571 } else {
14572 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14573 rc = BAD_VALUE;
14574 }
14575 }
14576 } else {
14577 LOGE("Bad instant aec value set %d", val);
14578 rc = BAD_VALUE;
14579 }
14580 return rc;
14581}
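/*===========================================================================
 * EXAMPLE (editor's illustrative sketch, not part of the original source):
 * For bring-up, instant AEC can also be forced through the properties read
 * above when the framework does not set QCAMERA3_INSTANT_AEC_MODE. The
 * value must be a valid cam_aec_convergence_type (see cam_types.h for the
 * exact enum values); 0 leaves instant AEC disabled:
 *
 *     adb shell setprop persist.camera.instant.aec 1
 *     adb shell setprop persist.camera.ae.instant.bound 10
 *==========================================================================*/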
14582
14583/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014584 * FUNCTION : get_num_overall_buffers
14585 *
14586 * DESCRIPTION: Estimate number of pending buffers across all requests.
14587 *
14588 * PARAMETERS : None
14589 *
14590 * RETURN : Number of overall pending buffers
14591 *
14592 *==========================================================================*/
14593uint32_t PendingBuffersMap::get_num_overall_buffers()
14594{
14595 uint32_t sum_buffers = 0;
14596 for (auto &req : mPendingBuffersInRequest) {
14597 sum_buffers += req.mPendingBufferList.size();
14598 }
14599 return sum_buffers;
14600}
14601
14602/*===========================================================================
14603 * FUNCTION : removeBuf
14604 *
14605 * DESCRIPTION: Remove a matching buffer from tracker.
14606 *
14607 * PARAMETERS : @buffer: image buffer for the callback
14608 *
14609 * RETURN : None
14610 *
14611 *==========================================================================*/
14612void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14613{
14614 bool buffer_found = false;
14615 for (auto req = mPendingBuffersInRequest.begin();
14616 req != mPendingBuffersInRequest.end(); req++) {
14617 for (auto k = req->mPendingBufferList.begin();
14618 k != req->mPendingBufferList.end(); k++ ) {
14619 if (k->buffer == buffer) {
14620 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14621 req->frame_number, buffer);
14622 k = req->mPendingBufferList.erase(k);
14623 if (req->mPendingBufferList.empty()) {
14624 // Remove this request from Map
14625 req = mPendingBuffersInRequest.erase(req);
14626 }
14627 buffer_found = true;
14628 break;
14629 }
14630 }
14631 if (buffer_found) {
14632 break;
14633 }
14634 }
14635 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14636 get_num_overall_buffers());
14637}
14638
14639/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014640 * FUNCTION : getBufErrStatus
14641 *
14642 * DESCRIPTION: get buffer error status
14643 *
14644 * PARAMETERS : @buffer: buffer handle
14645 *
14646 * RETURN : Error status
14647 *
14648 *==========================================================================*/
14649int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14650{
14651 for (auto& req : mPendingBuffersInRequest) {
14652 for (auto& k : req.mPendingBufferList) {
14653 if (k.buffer == buffer)
14654 return k.bufStatus;
14655 }
14656 }
14657 return CAMERA3_BUFFER_STATUS_OK;
14658}
14659
14660/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014661 * FUNCTION : setPAAFSupport
14662 *
14663 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14664 * feature mask according to stream type and filter
14665 * arrangement
14666 *
14667 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14668 * @stream_type: stream type
14669 * @filter_arrangement: filter arrangement
14670 *
14671 * RETURN : None
14672 *==========================================================================*/
14673void QCamera3HardwareInterface::setPAAFSupport(
14674 cam_feature_mask_t& feature_mask,
14675 cam_stream_type_t stream_type,
14676 cam_color_filter_arrangement_t filter_arrangement)
14677{
Thierry Strudel3d639192016-09-09 11:52:26 -070014678 switch (filter_arrangement) {
14679 case CAM_FILTER_ARRANGEMENT_RGGB:
14680 case CAM_FILTER_ARRANGEMENT_GRBG:
14681 case CAM_FILTER_ARRANGEMENT_GBRG:
14682 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014683 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14684 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014685 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014686 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14687 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014688 }
14689 break;
14690 case CAM_FILTER_ARRANGEMENT_Y:
14691 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14692 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14693 }
14694 break;
14695 default:
14696 break;
14697 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014698 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14699 feature_mask, stream_type, filter_arrangement);
14700
14701
Thierry Strudel3d639192016-09-09 11:52:26 -070014702}
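/*===========================================================================
 * EXAMPLE (editor's illustrative sketch, not part of the original source):
 * Typical use of setPAAFSupport() while building a stream's feature mask.
 * The starting mask value is illustrative:
 *
 *     cam_feature_mask_t mask = CAM_QCOM_FEATURE_NONE;
 *     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *             gCamCapability[mCameraId]->color_arrangement);
 *     // On a Bayer sensor (e.g. CAM_FILTER_ARRANGEMENT_RGGB) this ORs in
 *     // CAM_QCOM_FEATURE_PAAF, unless CAM_QTI_FEATURE_PPEISCORE is already
 *     // set in the mask.
 *==========================================================================*/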
14703
14704/*===========================================================================
14705* FUNCTION : getSensorMountAngle
14706*
14707* DESCRIPTION: Retrieve sensor mount angle
14708*
14709* PARAMETERS : None
14710*
14711* RETURN : sensor mount angle in uint32_t
14712*==========================================================================*/
14713uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14714{
14715 return gCamCapability[mCameraId]->sensor_mount_angle;
14716}
14717
14718/*===========================================================================
14719* FUNCTION : getRelatedCalibrationData
14720*
14721* DESCRIPTION: Retrieve related system calibration data
14722*
14723* PARAMETERS : None
14724*
14725* RETURN : Pointer of related system calibration data
14726*==========================================================================*/
14727const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14728{
14729 return (const cam_related_system_calibration_data_t *)
14730 &(gCamCapability[mCameraId]->related_cam_calibration);
14731}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014732
14733/*===========================================================================
14734 * FUNCTION : is60HzZone
14735 *
14736 * DESCRIPTION: Whether the phone is in a region with 60Hz mains electricity frequency
14737 *
14738 * PARAMETERS : None
14739 *
14740 * RETURN : True if in 60Hz zone, False otherwise
14741 *==========================================================================*/
14742bool QCamera3HardwareInterface::is60HzZone()
14743{
14744 time_t t = time(NULL);
14745 struct tm lt;
14746
14747 struct tm* r = localtime_r(&t, &lt);
14748
14749 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14750 return true;
14751 else
14752 return false;
14753}
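/*===========================================================================
 * EXAMPLE (editor's note, not part of the original source):
 * is60HzZone() is a coarse UTC-offset heuristic: offsets at or below UTC-2
 * or at or above UTC+8 (and any localtime_r failure) are treated as 60Hz
 * regions, everything in between as 50Hz. Worked examples:
 *
 *     UTC-8 (tm_gmtoff = -28800) : -28800 <= -7200   -> true  (60Hz)
 *     UTC+1 (tm_gmtoff =   3600) : neither bound hit -> false (50Hz)
 *     UTC+9 (tm_gmtoff =  32400) :  32400 >= 28800   -> true  (60Hz)
 *==========================================================================*/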
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014754
14755/*===========================================================================
14756 * FUNCTION : adjustBlackLevelForCFA
14757 *
14758 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14759 * of bayer CFA (Color Filter Array).
14760 *
14761 * PARAMETERS : @input: black level pattern in the order of RGGB
14762 * @output: black level pattern in the order of CFA
14763 * @color_arrangement: CFA color arrangement
14764 *
14765 * RETURN : None
14766 *==========================================================================*/
14767template<typename T>
14768void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14769 T input[BLACK_LEVEL_PATTERN_CNT],
14770 T output[BLACK_LEVEL_PATTERN_CNT],
14771 cam_color_filter_arrangement_t color_arrangement)
14772{
14773 switch (color_arrangement) {
14774 case CAM_FILTER_ARRANGEMENT_GRBG:
14775 output[0] = input[1];
14776 output[1] = input[0];
14777 output[2] = input[3];
14778 output[3] = input[2];
14779 break;
14780 case CAM_FILTER_ARRANGEMENT_GBRG:
14781 output[0] = input[2];
14782 output[1] = input[3];
14783 output[2] = input[0];
14784 output[3] = input[1];
14785 break;
14786 case CAM_FILTER_ARRANGEMENT_BGGR:
14787 output[0] = input[3];
14788 output[1] = input[2];
14789 output[2] = input[1];
14790 output[3] = input[0];
14791 break;
14792 case CAM_FILTER_ARRANGEMENT_RGGB:
14793 output[0] = input[0];
14794 output[1] = input[1];
14795 output[2] = input[2];
14796 output[3] = input[3];
14797 break;
14798 default:
14799 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14800 break;
14801 }
14802}
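/*===========================================================================
 * EXAMPLE (editor's illustrative sketch, not part of the original source):
 * Reordering a black-level pattern given in RGGB order (R, Gr, Gb, B) into
 * the sensor's CFA order. The input values are made up for illustration:
 *
 *     float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.f, 65.f, 66.f, 67.f};
 *     float cfa[BLACK_LEVEL_PATTERN_CNT];
 *     adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *     // cfa is now {65, 64, 67, 66}, i.e. Gr, R, B, Gb as laid out on a
 *     // GRBG sensor.
 *==========================================================================*/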
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014803
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014804void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14805 CameraMetadata &resultMetadata,
14806 std::shared_ptr<metadata_buffer_t> settings)
14807{
14808 if (settings == nullptr) {
14809 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14810 return;
14811 }
14812
14813 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14814 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014815 } else {
14816 resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014817 }
14818
14819 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14820 String8 str((const char *)gps_methods);
14821 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014822 } else {
14823 resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014824 }
14825
14826 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14827 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014828 } else {
14829 resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014830 }
14831
14832 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14833 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014834 } else {
14835 resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014836 }
14837
14838 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14839 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14840 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014841 } else {
14842 resultMetadata.erase(ANDROID_JPEG_QUALITY);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014843 }
14844
14845 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14846 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14847 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014848 } else {
14849 resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014850 }
14851
14852 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14853 int32_t fwk_thumb_size[2];
14854 fwk_thumb_size[0] = thumb_size->width;
14855 fwk_thumb_size[1] = thumb_size->height;
14856 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014857 } else {
14858 resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014859 }
14860
14861 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14862 uint8_t fwk_intent = intent[0];
14863 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014864 } else {
14865 resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014866 }
14867}
14868
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014869bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14870 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014871 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14872 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14873 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14874 return false;
14875 }
14876
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014877 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14878 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14879 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014880 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014881 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014882 return false;
14883 }
14884
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014885 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014886 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14887 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014888 return false;
14889 }
14890
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014891 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14892 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14893 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14894 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14895 return false;
14896 }
14897
14898 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14899 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14900 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14901 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14902 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14903 return false;
14904 }
14905
14906 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14907 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14908 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14909 return false;
14910 }
14911
14912 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14913 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14914 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14915        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14916 return false;
14917 }
14918
14919 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14920 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14921 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14922 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14923 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14924 return false;
14925 }
14926
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014927 // TODO (b/66500626): support AE compensation.
14928 if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
14929 metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
14930 ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
14931 return false;
14932 }
14933
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014934 // TODO (b/32585046): support non-ZSL.
14935 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14936 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14937 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14938 return false;
14939 }
14940
14941 // TODO (b/32586081): support flash.
14942 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14943 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14944 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14945 return false;
14946 }
14947
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014948 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14949 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14950 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14951 return false;
14952 }
14953
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014954
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014955 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014956 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14957 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014958 return false;
14959 }
14960
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014961 switch (request.output_buffers[0].stream->format) {
14962 case HAL_PIXEL_FORMAT_BLOB:
14963 break;
14964 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14965 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14966 // TODO (b/36693254): Only support full size.
14967 if (!gEnableMultipleHdrplusOutputs) {
14968 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14969 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14970 static_cast<int>(request.output_buffers[0].stream->height) !=
14971 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14972 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14973 return false;
14974 }
14975 }
14976 break;
14977 default:
14978            ALOGV("%s: Not an HDR+ request: only JPEG and YUV outputs are supported.", __FUNCTION__);
14979            for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14980                ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14981                        request.output_buffers[i].stream->width,
14982                        request.output_buffers[i].stream->height,
14983                        request.output_buffers[i].stream->format);
14984 }
14985 return false;
14986 }
14987
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014988 return true;
14989}
14990
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014991void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14992 if (hdrPlusRequest == nullptr) return;
14993
14994 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14995 // Find the stream for this buffer.
14996 for (auto streamInfo : mStreamInfo) {
14997 if (streamInfo->id == outputBufferIter.first) {
14998 if (streamInfo->channel == mPictureChannel) {
14999 // For picture channel, this buffer is internally allocated so return this
15000 // buffer to picture channel.
15001 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15002 } else {
15003 // Unregister this buffer for other channels.
15004 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15005 }
15006 break;
15007 }
15008 }
15009 }
15010
15011 hdrPlusRequest->outputBuffers.clear();
15012 hdrPlusRequest->frameworkOutputBuffers.clear();
15013}
15014
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015015bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
15016 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
15017 const CameraMetadata &metadata)
15018{
15019 if (hdrPlusRequest == nullptr) return false;
15020 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
15021
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015022 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015023 pbcamera::CaptureRequest pbRequest;
15024 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015025 // Iterate through all requested output buffers and add them to an HDR+ request.
15026 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15027 // Find the index of the stream in mStreamInfo.
15028 uint32_t pbStreamId = 0;
15029 bool found = false;
15030 for (auto streamInfo : mStreamInfo) {
15031 if (streamInfo->stream == request.output_buffers[i].stream) {
15032 pbStreamId = streamInfo->id;
15033 found = true;
15034 break;
15035 }
15036 }
15037
15038 if (!found) {
15039 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15040 abortPendingHdrplusRequest(hdrPlusRequest);
15041 return false;
15042 }
15043 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15044 switch (request.output_buffers[i].stream->format) {
15045 case HAL_PIXEL_FORMAT_BLOB:
15046 {
15047 // For jpeg output, get a YUV buffer from pic channel.
15048 QCamera3PicChannel *picChannel =
15049 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15050 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15051 if (res != OK) {
15052 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15053 __FUNCTION__, strerror(-res), res);
15054 abortPendingHdrplusRequest(hdrPlusRequest);
15055 return false;
15056 }
15057 break;
15058 }
15059 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15060 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15061 {
15062 // For YUV output, register the buffer and get the buffer def from the channel.
15063 QCamera3ProcessingChannel *channel =
15064 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15065 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15066 outBuffer.get());
15067 if (res != OK) {
15068 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15069 strerror(-res), res);
15070 abortPendingHdrplusRequest(hdrPlusRequest);
15071 return false;
15072 }
15073 break;
15074 }
15075 default:
15076 abortPendingHdrplusRequest(hdrPlusRequest);
15077 return false;
15078 }
15079
15080 pbcamera::StreamBuffer buffer;
15081 buffer.streamId = pbStreamId;
15082 buffer.dmaBufFd = outBuffer->fd;
15083 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15084 buffer.dataSize = outBuffer->frame_len;
15085
15086 pbRequest.outputBuffers.push_back(buffer);
15087
15088 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15089 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15090 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015091
15092 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015093 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015094 if (res != OK) {
15095 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15096 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015097 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015098 return false;
15099 }
15100
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015101 return true;
15102}
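/*===========================================================================
 * EXAMPLE (editor's illustrative sketch, not part of the original source):
 * Expected caller-side pattern for trySubmittingHdrPlusRequestLocked().
 * The surrounding bookkeeping (metadata conversion, map insertion) is an
 * assumption for illustration; 'meta' is the CameraMetadata view of
 * request->settings:
 *
 *     HdrPlusPendingRequest hdrPlusRequest;
 *     bool hdrPlusAccepted = mHdrPlusModeEnabled &&
 *             trySubmittingHdrPlusRequestLocked(&hdrPlusRequest, *request, meta);
 *     if (hdrPlusAccepted) {
 *         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
 *         mHdrPlusPendingRequests.emplace(request->frame_number, hdrPlusRequest);
 *     } else {
 *         // Fall back to the regular (non-HDR+) capture path.
 *     }
 *==========================================================================*/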
15103
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015104status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15105{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015106 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15107 return OK;
15108 }
15109
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015110 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015111 if (res != OK) {
15112 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15113 strerror(-res), res);
15114 return res;
15115 }
15116 gHdrPlusClientOpening = true;
15117
15118 return OK;
15119}
15120
Chien-Yu Chenee335912017-02-09 17:53:20 -080015121status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15122{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015123 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015124
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015125 if (mHdrPlusModeEnabled) {
15126 return OK;
15127 }
15128
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015129 // Check if gHdrPlusClient is opened or being opened.
15130 if (gHdrPlusClient == nullptr) {
15131 if (gHdrPlusClientOpening) {
15132 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15133 return OK;
15134 }
15135
15136 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015137 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015138 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15139 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015140 return res;
15141 }
15142
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015143 // When opening HDR+ client completes, HDR+ mode will be enabled.
15144 return OK;
15145
Chien-Yu Chenee335912017-02-09 17:53:20 -080015146 }
15147
15148 // Configure stream for HDR+.
15149 res = configureHdrPlusStreamsLocked();
15150 if (res != OK) {
15151 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015152 return res;
15153 }
15154
15155 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15156 res = gHdrPlusClient->setZslHdrPlusMode(true);
15157 if (res != OK) {
15158 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015159 return res;
15160 }
15161
15162 mHdrPlusModeEnabled = true;
15163 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15164
15165 return OK;
15166}
15167
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015168void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15169{
15170 if (gHdrPlusClientOpening) {
15171 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15172 }
15173}
15174
Chien-Yu Chenee335912017-02-09 17:53:20 -080015175void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15176{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015177 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015178 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015179 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15180 if (res != OK) {
15181 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15182 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015183
15184 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015185 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015186 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015187 }
15188
15189 mHdrPlusModeEnabled = false;
15190 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15191}
15192
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015193bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15194{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015195 // Check that at least one YUV or one JPEG output is configured.
15196 // TODO: Support RAW (b/36690506)
15197 for (auto streamInfo : mStreamInfo) {
15198 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15199 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15200 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15201 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15202 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15203 return true;
15204 }
15205 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015206 }
15207
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015208 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015209}
15210
Chien-Yu Chenee335912017-02-09 17:53:20 -080015211status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015212{
15213 pbcamera::InputConfiguration inputConfig;
15214 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15215 status_t res = OK;
15216
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015217 // Sensor MIPI will send data to Easel.
15218 inputConfig.isSensorInput = true;
15219 inputConfig.sensorMode.cameraId = mCameraId;
15220 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15221 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15222 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15223 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15224 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15225 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenc8b6ad02017-09-15 13:50:26 -070015226 inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
15227
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015228 if (mSensorModeInfo.num_raw_bits != 10) {
15229 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15230 mSensorModeInfo.num_raw_bits);
15231 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015232 }
15233
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015234 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015235
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015236 // Iterate through configured output streams in HAL and configure those streams in HDR+
15237 // service.
15238 for (auto streamInfo : mStreamInfo) {
15239 pbcamera::StreamConfiguration outputConfig;
15240 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15241 switch (streamInfo->stream->format) {
15242 case HAL_PIXEL_FORMAT_BLOB:
15243 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15244 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15245 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15246 streamInfo->channel, /*stream index*/0);
15247 if (res != OK) {
15248                        LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15249 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015250
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015251 return res;
15252 }
15253
15254 outputStreamConfigs.push_back(outputConfig);
15255 break;
15256 default:
15257 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15258 break;
15259 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015260 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015261 }
15262
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015263 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015264 if (res != OK) {
15265        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15266 strerror(-res), res);
15267 return res;
15268 }
15269
15270 return OK;
15271}
15272
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015273void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015274{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015275 pthread_mutex_lock(&mMutex);
15276 mState = ERROR;
15277 pthread_mutex_unlock(&mMutex);
15278
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015279 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015280}
15281
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015282void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15283{
15284 if (mEaselErrorFuture.valid()) {
15285 // The error future has been invoked.
15286 return;
15287 }
15288
15289 // Launch a future to handle the fatal error.
15290 mEaselErrorFuture = std::async(std::launch::async,
15291 &QCamera3HardwareInterface::handleEaselFatalError, this);
15292}
15293
15294void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15295{
15296 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15297 handleEaselFatalErrorAsync();
15298}
15299
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015300void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15301{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015302 int rc = NO_ERROR;
15303
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015304 if (client == nullptr) {
15305 ALOGE("%s: Opened client is null.", __FUNCTION__);
15306 return;
15307 }
15308
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015309 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015310 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15311
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015312 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015313 if (!gHdrPlusClientOpening) {
15314 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15315 return;
15316 }
15317
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015318 gHdrPlusClient = std::move(client);
15319 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015320 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015321
15322 // Set static metadata.
15323 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15324 if (res != OK) {
15325 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15326 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015327 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015328 gHdrPlusClient = nullptr;
15329 return;
15330 }
15331
15332 // Enable HDR+ mode.
15333 res = enableHdrPlusModeLocked();
15334 if (res != OK) {
15335 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15336 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015337
15338 // Get Easel firmware version
15339 if (EaselManagerClientOpened) {
15340 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15341 if (rc != OK) {
15342 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15343 } else {
15344 mEaselFwUpdated = true;
15345 }
15346 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015347}
15348
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015349void QCamera3HardwareInterface::onOpenFailed(status_t err)
15350{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015351 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015352 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015353 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015354 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015355}
15356
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015357void QCamera3HardwareInterface::onFatalError()
15358{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015359 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15360 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015361}
15362
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015363void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15364{
15365 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15366 __LINE__, requestId, apSensorTimestampNs);
15367
15368 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15369}
15370
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015371void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15372{
15373 pthread_mutex_lock(&mMutex);
15374
15375 // Find the pending request for this result metadata.
15376 auto requestIter = mPendingRequestsList.begin();
15377 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15378 requestIter++;
15379 }
15380
15381 if (requestIter == mPendingRequestsList.end()) {
15382 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15383 pthread_mutex_unlock(&mMutex);
15384 return;
15385 }
15386
15387 requestIter->partial_result_cnt++;
15388
15389 CameraMetadata metadata;
15390 uint8_t ready = true;
15391 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15392
15393 // Send it to framework.
15394 camera3_capture_result_t result = {};
15395
15396 result.result = metadata.getAndLock();
15397 // Populate metadata result
15398 result.frame_number = requestId;
15399 result.num_output_buffers = 0;
15400 result.output_buffers = NULL;
15401 result.partial_result = requestIter->partial_result_cnt;
15402
15403 orchestrateResult(&result);
15404 metadata.unlock(result.result);
15405
15406 pthread_mutex_unlock(&mMutex);
15407}
15408
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015409void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15410 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15411 uint32_t stride, int32_t format)
15412{
15413 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15414 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15415 __LINE__, width, height, requestId);
15416 char buf[FILENAME_MAX] = {};
15417 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15418 requestId, width, height);
15419
15420 pbcamera::StreamConfiguration config = {};
15421 config.image.width = width;
15422 config.image.height = height;
15423 config.image.format = format;
15424
15425 pbcamera::PlaneConfiguration plane = {};
15426 plane.stride = stride;
15427 plane.scanline = height;
15428
15429 config.image.planes.push_back(plane);
15430
15431 pbcamera::StreamBuffer buffer = {};
15432 buffer.streamId = 0;
15433 buffer.dmaBufFd = -1;
15434 buffer.data = postview->data();
15435 buffer.dataSize = postview->size();
15436
15437 hdrplus_client_utils::writePpm(buf, config, buffer);
15438 }
15439
15440 pthread_mutex_lock(&mMutex);
15441
15442 // Find the pending request for this result metadata.
15443 auto requestIter = mPendingRequestsList.begin();
15444 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15445 requestIter++;
15446 }
15447
15448 if (requestIter == mPendingRequestsList.end()) {
15449 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15450 pthread_mutex_unlock(&mMutex);
15451 return;
15452 }
15453
15454 requestIter->partial_result_cnt++;
15455
15456 CameraMetadata metadata;
15457 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15458 static_cast<int32_t>(stride)};
15459 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15460 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15461
15462 // Send it to framework.
15463 camera3_capture_result_t result = {};
15464
15465 result.result = metadata.getAndLock();
15466 // Populate metadata result
15467 result.frame_number = requestId;
15468 result.num_output_buffers = 0;
15469 result.output_buffers = NULL;
15470 result.partial_result = requestIter->partial_result_cnt;
15471
15472 orchestrateResult(&result);
15473 metadata.unlock(result.result);
15474
15475 pthread_mutex_unlock(&mMutex);
15476}
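/*===========================================================================
 * EXAMPLE (editor's note, not part of the original source):
 * The postview dump above is gated by a debug property (name taken from
 * the code above); it can be toggled from adb during bring-up and writes
 * postview_<request>_<w>x<h>.ppm files under QCAMERA_DUMP_FRM_LOCATION:
 *
 *     adb shell setprop persist.camera.hdrplus.dump_postview 1
 *==========================================================================*/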
15477
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015478void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015479 const camera_metadata_t &resultMetadata)
15480{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015481 if (result == nullptr) {
15482 ALOGE("%s: result is nullptr.", __FUNCTION__);
15483 return;
15484 }
15485
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015486 // Find the pending HDR+ request.
15487 HdrPlusPendingRequest pendingRequest;
15488 {
15489 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15490 auto req = mHdrPlusPendingRequests.find(result->requestId);
15491 pendingRequest = req->second;
15492 }
15493
15494 // Update the result metadata with the settings of the HDR+ still capture request because
15495 // the result metadata belongs to a ZSL buffer.
15496 CameraMetadata metadata;
15497 metadata = &resultMetadata;
15498 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15499 camera_metadata_t* updatedResultMetadata = metadata.release();
15500
15501 uint32_t halSnapshotStreamId = 0;
15502 if (mPictureChannel != nullptr) {
15503 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15504 }
15505
15506 auto halMetadata = std::make_shared<metadata_buffer_t>();
15507 clear_metadata_buffer(halMetadata.get());
15508
15509 // Convert updated result metadata to HAL metadata.
15510 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15511 halSnapshotStreamId, /*minFrameDuration*/0);
15512 if (res != 0) {
15513 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15514 }
15515
15516 for (auto &outputBuffer : result->outputBuffers) {
15517 uint32_t streamId = outputBuffer.streamId;
15518
15519 // Find the framework output buffer in the pending request.
15520 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15521 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15522 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15523 streamId);
15524 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015525 }
15526
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015527 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15528
15529 // Find the channel for the output buffer.
15530 QCamera3ProcessingChannel *channel =
15531 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15532
15533 // Find the output buffer def.
15534 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15535 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15536 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15537 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015538 }
15539
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015540 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015541
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015542 // Check whether to dump the buffer.
15543 if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15544 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15545 // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15546 char prop[PROPERTY_VALUE_MAX];
15547 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15548 bool dumpYuvOutput = atoi(prop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015549
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015550 if (dumpYuvOutput) {
15551 // Dump yuv buffer to a ppm file.
15552 pbcamera::StreamConfiguration outputConfig;
15553 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15554 channel, /*stream index*/0);
15555 if (rc == OK) {
15556 char buf[FILENAME_MAX] = {};
15557 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15558 result->requestId, streamId,
15559 outputConfig.image.width, outputConfig.image.height);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015560
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015561 hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15562 } else {
15563 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15564 "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15565 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015566 }
15567 }
15568
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015569 if (channel == mPictureChannel) {
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015570 // Return the buffer to pic channel for encoding.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015571 mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15572 frameworkOutputBuffer->buffer, result->requestId,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015573 halMetadata);
15574 } else {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015575 // Return the buffer to camera framework.
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015576 pthread_mutex_lock(&mMutex);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015577 handleBufferWithLock(frameworkOutputBuffer, result->requestId);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015578 channel->unregisterBuffer(outputBufferDef.get());
Chien-Yu Chen0c8eaaa2017-09-19 14:13:14 -070015579 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015580 }
15581 }
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015582
15583 // Send HDR+ metadata to framework.
15584 {
15585 pthread_mutex_lock(&mMutex);
15586
15587 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15588 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15589 pthread_mutex_unlock(&mMutex);
15590 }
15591
15592 // Remove the HDR+ pending request.
15593 {
15594 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15595 auto req = mHdrPlusPendingRequests.find(result->requestId);
15596 mHdrPlusPendingRequests.erase(req);
15597 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015598}
15599
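// Handles a failed HDR+ capture result: returns the snapshot YUV buffer to the picture
// channel (or unregisters other buffers from their channels), notifies the framework with a
// CAMERA3_MSG_ERROR_BUFFER for every pending buffer of that frame, sends a result carrying
// the errored buffers, and finally drops the pending HDR+ and HAL request entries.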
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015600void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15601{
15602 if (failedResult == nullptr) {
15603 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15604 return;
15605 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015606
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015607 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015608
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015609 // Find the pending HDR+ request.
15610 HdrPlusPendingRequest pendingRequest;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015611 {
15612 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015613 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15614 if (req == mHdrPlusPendingRequests.end()) {
15615 ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
15616 return;
15617 }
15618 pendingRequest = req->second;
15619 }
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015620
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015621 for (auto &outputBuffer : failedResult->outputBuffers) {
15622 uint32_t streamId = outputBuffer.streamId;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015623
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015624 // Find the channel, framework buffer, and buffer def for this stream.
15625 // Find the framework output buffer in the pending request.
15626 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15627 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15628 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15629 streamId);
15630 continue;
15631 }
15632
15633 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15634
15635 // Find the channel for the output buffer.
15636 QCamera3ProcessingChannel *channel =
15637 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15638
15639 // Find the output buffer def.
15640 auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15641 if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15642 ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15643 continue;
15644 }
15645
15646 std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15647
15648 if (channel == mPictureChannel) {
15649 // Return the buffer to pic channel.
15650 mPictureChannel->returnYuvBuffer(outputBufferDef.get());
15651 } else {
15652 channel->unregisterBuffer(outputBufferDef.get());
15653 }
15654 }
15655
15656 // Remove the HDR+ pending request.
15657 {
15658 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15659 auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15660 mHdrPlusPendingRequests.erase(req);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015661 }
15662
15663 pthread_mutex_lock(&mMutex);
15664
15665 // Find the pending buffers.
15666 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15667 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15668 if (pendingBuffers->frame_number == failedResult->requestId) {
15669 break;
15670 }
15671 pendingBuffers++;
15672 }
15673
15674 // Send out buffer errors for the pending buffers.
15675 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15676 std::vector<camera3_stream_buffer_t> streamBuffers;
15677 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15678 // Prepare a stream buffer.
15679 camera3_stream_buffer_t streamBuffer = {};
15680 streamBuffer.stream = buffer.stream;
15681 streamBuffer.buffer = buffer.buffer;
15682 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15683 streamBuffer.acquire_fence = -1;
15684 streamBuffer.release_fence = -1;
15685
15686 streamBuffers.push_back(streamBuffer);
15687
15688 // Send out error buffer event.
15689 camera3_notify_msg_t notify_msg = {};
15690 notify_msg.type = CAMERA3_MSG_ERROR;
15691 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15692 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15693 notify_msg.message.error.error_stream = buffer.stream;
15694
15695 orchestrateNotify(&notify_msg);
15696 }
15697
15698 camera3_capture_result_t result = {};
15699 result.frame_number = pendingBuffers->frame_number;
15700 result.num_output_buffers = streamBuffers.size();
15701 result.output_buffers = &streamBuffers[0];
15702
15703 // Send out result with buffer errors.
15704 orchestrateResult(&result);
15705
15706 // Remove pending buffers.
15707 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15708 }
15709
15710 // Remove pending request.
15711 auto halRequest = mPendingRequestsList.begin();
15712 while (halRequest != mPendingRequestsList.end()) {
15713 if (halRequest->frame_number == failedResult->requestId) {
15714 mPendingRequestsList.erase(halRequest);
15715 break;
15716 }
15717 halRequest++;
15718 }
15719
15720 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015721}
15722
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015723
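// ShutterDispatcher keeps regular and reprocess shutter notifications in frame-number order:
// a shutter is delivered only after every earlier expected shutter in the same map has been
// marked ready, so a late frame holds back the shutters queued behind it until it arrives or
// the dispatcher is cleared.
//
// Typical usage (a sketch; the actual call sites live elsewhere in this HAL):
//   dispatcher.expectShutter(frameNumber, /*isReprocess*/false); // when the request is queued
//   dispatcher.markShutterReady(frameNumber, timestamp);         // when the frame's timestamp is known
//   dispatcher.clear();                                          // on flush/close to drop stale entries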
15724ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15725 mParent(parent) {}
15726
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015727void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015728{
15729 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015730
15731 if (isReprocess) {
15732 mReprocessShutters.emplace(frameNumber, Shutter());
15733 } else {
15734 mShutters.emplace(frameNumber, Shutter());
15735 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015736}
15737
15738void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15739{
15740 std::lock_guard<std::mutex> lock(mLock);
15741
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015742 std::map<uint32_t, Shutter> *shutters = nullptr;
15743
15744 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015745 auto shutter = mShutters.find(frameNumber);
15746 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015747 shutter = mReprocessShutters.find(frameNumber);
15748 if (shutter == mReprocessShutters.end()) {
15749 // Shutter was already sent.
15750 return;
15751 }
15752 shutters = &mReprocessShutters;
15753 } else {
15754 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015755 }
15756
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015757 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015758 shutter->second.ready = true;
15759 shutter->second.timestamp = timestamp;
15760
15761 // Iterate through the shutters and send them out until reaching one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015762 shutter = shutters->begin();
15763 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015764 if (!shutter->second.ready) {
15765 // If this shutter is not ready, the following shutters can't be sent.
15766 break;
15767 }
15768
15769 camera3_notify_msg_t msg = {};
15770 msg.type = CAMERA3_MSG_SHUTTER;
15771 msg.message.shutter.frame_number = shutter->first;
15772 msg.message.shutter.timestamp = shutter->second.timestamp;
15773 mParent->orchestrateNotify(&msg);
15774
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015775 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015776 }
15777}
15778
15779void ShutterDispatcher::clear(uint32_t frameNumber)
15780{
15781 std::lock_guard<std::mutex> lock(mLock);
15782 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015783 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015784}
15785
15786void ShutterDispatcher::clear()
15787{
15788 std::lock_guard<std::mutex> lock(mLock);
15789
15790 // Log errors for stale shutters.
15791 for (auto &shutter : mShutters) {
15792 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15793 __FUNCTION__, shutter.first, shutter.second.ready,
15794 shutter.second.timestamp);
15795 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015796
15797 // Log errors for stale reprocess shutters.
15798 for (auto &shutter : mReprocessShutters) {
15799 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15800 __FUNCTION__, shutter.first, shutter.second.ready,
15801 shutter.second.timestamp);
15802 }
15803
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015804 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015805 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015806}
15807
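// OutputBufferDispatcher does the same for completed output buffers, per stream: each
// configured stream keeps its own frame-number -> buffer map, and a ready buffer is returned
// to the framework only after all earlier expected buffers on that stream have been returned.
// Stale entries are logged and dropped by clear().
//
// Typical usage (a sketch; the actual call sites live elsewhere in this HAL):
//   dispatcher.configureStreams(streamList);          // at stream configuration time
//   dispatcher.expectBuffer(frameNumber, stream);     // when a request references the stream
//   dispatcher.markBufferReady(frameNumber, buffer);  // when the buffer has been filled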
15808OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15809 mParent(parent) {}
15810
15811status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15812{
15813 std::lock_guard<std::mutex> lock(mLock);
15814 mStreamBuffers.clear();
15815 if (!streamList) {
15816 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15817 return -EINVAL;
15818 }
15819
15820 // Create a "frame-number -> buffer" map for each stream.
15821 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15822 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15823 }
15824
15825 return OK;
15826}
15827
15828status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15829{
15830 std::lock_guard<std::mutex> lock(mLock);
15831
15832 // Find the "frame-number -> buffer" map for the stream.
15833 auto buffers = mStreamBuffers.find(stream);
15834 if (buffers == mStreamBuffers.end()) {
15835 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15836 return -EINVAL;
15837 }
15838
15839 // Create an unready buffer for this frame number.
15840 buffers->second.emplace(frameNumber, Buffer());
15841 return OK;
15842}
15843
15844void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15845 const camera3_stream_buffer_t &buffer)
15846{
15847 std::lock_guard<std::mutex> lock(mLock);
15848
15849 // Find the frame number -> buffer map for the stream.
15850 auto buffers = mStreamBuffers.find(buffer.stream);
15851 if (buffers == mStreamBuffers.end()) {
15852 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15853 return;
15854 }
15855
15856 // Find the unready buffer for this frame number and mark it ready.
15857 auto pendingBuffer = buffers->second.find(frameNumber);
15858 if (pendingBuffer == buffers->second.end()) {
15859 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15860 return;
15861 }
15862
15863 pendingBuffer->second.ready = true;
15864 pendingBuffer->second.buffer = buffer;
15865
15866 // Iterate through the buffers and send them out until reaching one that is not ready yet.
15867 pendingBuffer = buffers->second.begin();
15868 while (pendingBuffer != buffers->second.end()) {
15869 if (!pendingBuffer->second.ready) {
15870 // If this buffer is not ready, the following buffers can't be sent.
15871 break;
15872 }
15873
15874 camera3_capture_result_t result = {};
15875 result.frame_number = pendingBuffer->first;
15876 result.num_output_buffers = 1;
15877 result.output_buffers = &pendingBuffer->second.buffer;
15878
15879 // Send out the result with this ready output buffer.
15880 mParent->orchestrateResult(&result);
15881
15882 pendingBuffer = buffers->second.erase(pendingBuffer);
15883 }
15884}
15885
15886void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15887{
15888 std::lock_guard<std::mutex> lock(mLock);
15889
15890 // Log errors for stale buffers.
15891 for (auto &buffers : mStreamBuffers) {
15892 for (auto &buffer : buffers.second) {
15893 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15894 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15895 }
15896 buffers.second.clear();
15897 }
15898
15899 if (clearConfiguredStreams) {
15900 mStreamBuffers.clear();
15901 }
15902}
15903
Thierry Strudel3d639192016-09-09 11:52:26 -070015904}; //end namespace qcamera